diff --git a/404.html b/404.html index 47a685a4..8e35af51 100644 --- a/404.html +++ b/404.html @@ -7,13 +7,13 @@ - +
본문으로 건너뛰기

페이지를 찾을 수 없습니다.

원하는 페이지를 찾을 수 없습니다.

사이트 관리자에게 링크가 깨진 것을 알려주세요.

- + \ No newline at end of file diff --git a/assets/js/01a9496d.d6f459d0.js b/assets/js/01a9496d.7d5a9ac4.js similarity index 99% rename from assets/js/01a9496d.d6f459d0.js rename to assets/js/01a9496d.7d5a9ac4.js index db4da959..700552ab 100644 --- a/assets/js/01a9496d.d6f459d0.js +++ b/assets/js/01a9496d.7d5a9ac4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2416],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var a=r.createContext({}),s=function(e){var t=r.useContext(a),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(a.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,l=e.originalType,a=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),d=s(n),f=i,m=d["".concat(a,".").concat(f)]||d[f]||u[f]||l;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var l=n.length,o=new Array(l);o[0]=f;var p={};for(var a in t)hasOwnProperty.call(t,a)&&(p[a]=t[a]);p.originalType=e,p[d]="string"==typeof e?e:i,o[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>a,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var r=n(7462),i=(n(7294),n(3905));const l={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"introduction/intro",id:"introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/docs/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/docs/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. 
Levels of MLOps",permalink:"/docs/introduction/levels"}},a={},s=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML+Ops",id:"1-mlops",level:3},{value:"Rule Based",id:"rule-based",level:4},{value:"Machine Learning",id:"machine-learning",level:4},{value:"Deep Learning",id:"deep-learning",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) \uacb0\ub860",id:"3-\uacb0\ub860",level:3}],c={toc:s},d="wrapper";function u(e){let{components:t,...l}=e;return(0,i.kt)(d,(0,r.Z)({},c,l,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,i.kt)("p",null,"2012\ub144 Alexnet \uc774\ud6c4 CV, NLP\ub97c \ube44\ub86f\ud558\uc5ec \ub370\uc774\ud130\uac00 \uc874\uc7ac\ud558\ub294 \ub3c4\uba54\uc778\uc774\ub77c\uba74 \uc5b4\ub514\uc11c\ub4e0 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \ub3c4\uc785\ud558\uace0\uc790 \ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc740 AI\ub77c\ub294 \ub2e8\uc5b4\ub85c \ubb36\uc774\uba70 \ubd88\ub838\uace0 \ub9ce\uc740 \ub9e4\uccb4\uc5d0\uc11c AI\uc758 \ud544\uc694\uc131\uc744 \uc678\ucce4\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \ubb34\uc218\ud788 \ub9ce\uc740 \uae30\uc5c5\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc218\ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\ub97c \uc9c4\ud589\ud558\uc600\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uadf8 \uacb0\uacfc\ub294 \uc5b4\ub5bb\uac8c \ub418\uc5c8\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uc5d8\ub9ac\uba3c\ud2b8 AI\uc758 \uc74c\ubcd1\ucc2c \ub3d9\ubd81\uc544 \uc9c0\uc5ed \ucd1d\uad04\ucc45\uc784\uc790\ub294 ",(0,i.kt)("a",{parentName:"p",href:"https://zdnet.co.kr/view/?no=20200611062002"},(0,i.kt)("em",{parentName:"a"},'"10\uac1c \uae30\uc5c5\uc5d0 AI \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud55c\ub2e4\uba74 \uadf8\uc911 9\uac1c\ub294 \ucee8\uc149\uac80\uc99d(POC)\ub9cc \ud558\ub2e4 \ub05d\ub09c\ub2e4"')),"\uace0 \ub9d0\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ucc98\ub7fc \ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uc774 \ubb38\uc81c\ub97c \ud480 \uc218 \uc788\uc744 \uac83 \uac19\ub2e4\ub294 \uac00\ub2a5\uc131\ub9cc\uc744 \ubcf4\uc5ec\uc8fc\uace0 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4. 
\uadf8\ub9ac\uace0 \uc774 \uc2dc\uae30\ucbe4\uc5d0 ",(0,i.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI\uc5d0 \ub2e4\uc2dc \uaca8\uc6b8"),"\uc774 \ub2e4\uac00\uc624\uace0 \uc788\ub2e4\ub294 \uc804\ub9dd\ub3c4 \ub098\uc624\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc65c \ud504\ub85c\uc81d\ud2b8 \ub300\ubd80\ubd84\uc774 \ucee8\uc149\uac80\uc99d(POC) \ub2e8\uacc4\uc5d0\uc11c \ub05d\ub0ac\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd \ucf54\ub4dc\ub9cc\uc73c\ub85c\ub294 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub97c \uc6b4\uc601\ud560 \uc218 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc2e4\uc81c \uc11c\ube44\uc2a4 \ub2e8\uacc4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc758 \ucf54\ub4dc\uac00 \ucc28\uc9c0\ud558\ub294 \ubd80\ubd84\uc740 \uc0dd\uac01\ubcf4\ub2e4 \ud06c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ub2e8\uc21c\ud788 \ubaa8\ub378\uc758 \uc131\ub2a5\ub9cc\uc774 \uc544\ub2cc \ub2e4\ub978 \ub9ce\uc740 \ubd80\ubd84\uc744 \uace0\ub824\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uad6c\uae00\uc740 \uc774\ub7f0 \ubb38\uc81c\ub97c 2015\ub144 ",(0,i.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),"\uc5d0\uc11c \uc9c0\uc801\ud55c \ubc14 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uc774 \ub17c\ubb38\uc774 \ub098\uc62c \ub2f9\uc2dc\uc5d0\ub294 \uc544\uc9c1 \ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc758 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud558\uae30 \ubc14\uc05c \uc2dc\uae30\uc600\uae30 \ub54c\ubb38\uc5d0, \ub17c\ubb38\uc774 \uc9c0\uc801\ud558\ub294 \ubc14\uc5d0 \ub9ce\uc740 \uc8fc\uc758\ub97c \uae30\uc6b8\uc774\uc9c0\ub294 \uc54a\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 \uba87 \ub144\uc774 \uc9c0\ub09c \ud6c4 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud574\ub0b4\uc5b4, \uc774\uc81c \uc0ac\ub78c\ub4e4\uc740 \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0 \uc801\uc6a9\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uace7 \ub9ce\uc740 \uc0ac\ub78c\uc774 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub294 \uc27d\uc9c0 \uc54a\ub2e4\ub294 \uac83\uc744 \uae68\ub2ec\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"devops"},"Devops"),(0,i.kt)("p",null,"MLOps\ub294 \uc774\uc804\uc5d0 \uc5c6\ub358 \uc0c8\ub85c\uc6b4 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c DevOps\ub77c\uace0 \ubd88\ub9ac\ub294 \uac1c\ubc1c \ubc29\ubc95\ub860\uc5d0\uc11c \ud30c\uc0dd\ub41c \ub2e8\uc5b4\uc785\ub2c8\ub2e4. 
\uadf8\ub807\uae30\uc5d0 DevOps\ub97c \uc774\ud574\ud55c\ub2e4\uba74 MLOps\ub97c \uc774\ud574\ud558\ub294 \ub370 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"devops-1"},"DevOps"),(0,i.kt)("p",null,"DevOps\ub294 Development(\uac1c\ubc1c)\uc640 Operations(\uc6b4\uc601)\uc758 \ud569\uc131\uc5b4\ub85c \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc758 \uac1c\ubc1c(Development)\uacfc \uc6b4\uc601(Operations)\uc758 \ud569\uc131\uc5b4\ub85c\uc11c \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c\uc790\uc640 \uc815\ubcf4\uae30\uc220 \uc804\ubb38\uac00 \uac04\uc758 \uc18c\ud1b5, \ud611\uc5c5 \ubc0f \ud1b5\ud569\uc744 \uac15\uc870\ud558\ub294 \uac1c\ubc1c \ud658\uacbd\uc774\ub098 \ubb38\ud654\ub97c \ub9d0\ud569\ub2c8\ub2e4.\nDevOps\uc758 \ubaa9\uc801\uc740 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \uc870\uc9c1\uacfc \uc6b4\uc601 \uc870\uc9c1\uac04\uc758 \uc0c1\ud638 \uc758\uc874\uc801 \ub300\uc751\uc774\uba70 \uc870\uc9c1\uc774 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uacfc \uc11c\ube44\uc2a4\ub97c \ube60\ub978 \uc2dc\uac04\uc5d0 \uac1c\ubc1c \ubc0f \ubc30\ud3ec\ud558\ub294 \uac83\uc744 \ubaa9\uc801\uc73c\ub85c \ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,i.kt)("p",null,"\uadf8\ub7fc \uac04\ub2e8\ud55c \uc0c1\ud669 \uc124\uba85\uc744 \ud1b5\ud574 DevOps\uac00 \uc65c \ud544\uc694\ud55c\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uc9c0\uc6d0\ud558\ub294 \uae30\ub2a5\uc774 \ub9ce\uc9c0 \uc54a\uc73c\uba70 \ud300 \ub610\ub294 \ud68c\uc0ac\uc758 \uaddc\ubaa8\uac00 \uc791\uc2b5\ub2c8\ub2e4. \uc774\ub54c\uc5d0\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uad6c\ubd84\uc774 \uc5c6\uac70\ub098 \uc791\uc740 \uaddc\ubaa8\uc758 \ud300\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \ud575\uc2ec\uc740 \uaddc\ubaa8\uac00 \uc791\ub2e4\ub294 \uac83\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub54c\ub294 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \uc811\uc810\uc774 \ub9ce\uace0, \uc9d1\uc911\ud574\uc57c \ud558\ub294 \uc11c\ube44\uc2a4\uac00 \uc801\uae30 \ub54c\ubb38\uc5d0 \ube60\ub974\uac8c \uc11c\ube44\uc2a4\ub97c \uac1c\uc120\ud574 \ub098\uac08 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \ucee4\uc9c8\uc218\ub85d \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc740 \ubd84\ub9ac\ub418\uace0 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \ucc44\ub110\uc758 \ubb3c\ub9ac\uc801\uc778 \ud55c\uacc4\uac00 \uc624\uac8c \ub429\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\ub978 \ud300\uacfc \ud568\uaed8\ud558\ub294 \ubbf8\ud305\uc5d0 \ud300\uc6d0 \uc804\uccb4\uac00 \ubbf8\ud305\uc744 \ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c \uac01 \ud300\uc758 \ud300\uc7a5 \ud639\uc740 \uc18c\uc218\uc758 \uc2dc\ub2c8\uc5b4\ub9cc \ucc38\uc11d\ud558\uc5ec \ubbf8\ud305\uc744 \uc9c4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7f0 \uc18c\ud1b5 \ucc44\ub110\uc758 \ud55c\uacc4\ub294 \ud544\uc5f0\uc801\uc73c\ub85c \uc18c\ud1b5\uc758 \ubd80\uc7ac\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ub2e4 \ubcf4\uba74 \uac1c\ubc1c\ud300\uc740 \uc0c8\ub85c\uc6b4 \uae30\ub2a5\ub4e4\uc744 \uacc4\uc18d\ud574\uc11c \uac1c\ubc1c\ud558\uace0 \uc6b4\uc601\ud300 \uc785\uc7a5\uc5d0\uc11c\ub294 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \ubc30\ud3ec \uc2dc \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\ub294 \ub4f1 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc704\uc640 \uac19\uc740 \uc0c1\ud669\uc774 \ubc18\ubcf5\ub418\uba74 \uc870\uc9c1 \uc774\uae30\uc8fc\uc758\ub77c\uace0 \ubd88\ub9ac\ub294 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"silo",src:n(6651).Z,width:"892",height:"498"})),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"\uc0ac\uc77c\ub85c(silo)\ub294 \uace1\uc2dd\uc774\ub098 \uc0ac\ub8cc\ub97c \uc800\uc7a5\ud558\ub294 \uad74\ub69d \ubaa8\uc591\uc758 \ucc3d\uace0\ub97c \uc758\ubbf8\ud55c\ub2e4. \uc0ac\uc77c\ub85c\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc874\uc7ac\ud558\uba70 \uc800\uc7a5\ub418\ub294 \ubb3c\ud488\uc774 \uc11c\ub85c \uc11e\uc774\uc9c0 \uc54a\ub3c4\ub85d \ucca0\uc800\ud788 \uad00\ub9ac\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc900\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc0ac\uc77c\ub85c \ud6a8\uacfc(Organizational Silos Effect)\ub294 \uc870\uc9c1 \ubd80\uc11c \uac04\uc5d0 \uc11c\ub85c \ud611\ub825\ud558\uc9c0 \uc54a\uace0 \ub0b4\ubd80 \uc774\uc775\ub9cc\uc744 \ucd94\uad6c\ud558\ub294 \ud604\uc0c1\uc744 \uc758\ubbf8\ud55c\ub2e4. \uc870\uc9c1 \ub0b4\uc5d0\uc11c \uac1c\ubcc4 \ubd80\uc11c\ub07c\ub9ac \uc11c\ub85c \ub2f4\uc744 \uc313\uace0 \uac01\uc790\uc758 \uc774\uc775\uc5d0\ub9cc \ubab0\ub450\ud558\ub294 \ubd80\uc11c \uc774\uae30\uc8fc\uc758\ub97c \uc77c\uceeb\ub294\ub2e4.")),(0,i.kt)("p",null,"\uc0ac\uc77c\ub85c \ud604\uc0c1\uc740 \uc11c\ube44\uc2a4 \ud488\uc9c8\uc758 \uc800\ud558\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7ec\ud55c \uc0ac\uc77c\ub85c \ud604\uc0c1\uc744 \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub098\uc628 \uac83\uc774 \ubc14\ub85c DevOps\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"cicd"},"CI/CD"),(0,i.kt)("p",null,"Continuous Integration(CI) \uc640 Continuous Delivery (CD)\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uc7a5\ubcbd\uc744 \ud574\uc81c\ud558\uae30 \uc704\ud55c \uad6c\uccb4\uc801\uc778 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"cicd",src:n(7430).Z,width:"1400",height:"299"})),(0,i.kt)("p",null,"\uc774 \ubc29\ubc95\uc744 \ud1b5\ud574\uc11c \uac1c\ubc1c\ud300\uc5d0\uc11c\ub294 \uc6b4\uc601\ud300\uc758 \ud658\uacbd\uc744 \uc774\ud574\ud558\uace0 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c \uc911\uc778 \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uae4c\uc9c0 \uc774\uc5b4\uc9c8 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4. 
\uc6b4\uc601\ud300\uc740 \uac80\uc99d\ub41c \uae30\ub2a5 \ub610\ub294 \uac1c\uc120\ub41c \uc81c\ud488\uc744 \ub354 \uc790\uc8fc \ubc30\ud3ec\ud574 \uace0\uac1d\uc758 \uc81c\ud488 \uacbd\ud5d8\uc744 \uc0c1\uc2b9\uc2dc\ud0b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \uc885\ud569\ud558\uc790\uba74 DevOps\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300 \uac04\uc758 \ubb38\uc81c\uac00 \uc788\uc5c8\uace0 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\ub860\uc785\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"mlops"},"MLOps"),(0,i.kt)("h3",{id:"1-mlops"},"1) ML+Ops"),(0,i.kt)("p",null,"MLOps\ub294 Machine Learning \uacfc Operations\uc758 \ud569\uc131\uc5b4\ub85c DevOps\uc5d0\uc11c Dev\uac00 ML\ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc81c \uc55e\uc5d0\uc11c \uc0b4\ud3b4\ubcf8 DevOps\ub97c \ud1b5\ud574 MLOps\uac00 \ubb34\uc5c7\uc778\uc9c0 \uc9d0\uc791\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\u201cMLOps\ub294 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc758 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\uc785\ub2c8\ub2e4.\u201d\n\uc774 \ub9d0\uc740 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300 \uc0ac\uc774\uc5d0 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\ub2e4\ub294 \uc758\ubbf8\uc785\ub2c8\ub2e4. \uadf8\ub7fc \uc65c \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc5d0\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\uc744\uae4c\uc694? \ub450 \ud300 \uac04\uc758 \ubb38\uc81c\ub97c \uc54c\uc544\ubcf4\uae30 \uc704\ud574\uc11c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \uc608\uc2dc\ub85c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"rule-based"},"Rule Based"),(0,i.kt)("p",null,"\ucc98\uc74c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \ub9cc\ub4dc\ub294 \uacbd\uc6b0 \uac04\ub2e8\ud55c \uaddc\uce59\uc744 \uae30\ubc18\uc73c\ub85c \uc544\uc774\ud15c\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c 1\uc8fc\uc77c\uac04 \ud310\ub9e4\ub7c9\uc774 \uac00\uc7a5 \ub9ce\uc740 \uc21c\uc11c\ub300\ub85c \ubcf4\uc5ec\uc8fc\ub294 \uc2dd\uc758 \ubc29\uc2dd\uc744 \uc774\uc6a9\ud569\ub2c8\ub2e4. \uc774 \ubc29\uc2dd\uc73c\ub85c \ubaa8\ub378\uc744 \uc815\ud55c\ub2e4\uba74 \ud2b9\ubcc4\ud55c \uc774\uc720\uac00 \uc5c6\ub294 \uc774\uc0c1 \ubaa8\ub378\uc758 \uc218\uc815\uc774 \ud544\uc694 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"machine-learning"},"Machine Learning"),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \uc870\uae08 \ucee4\uc9c0\uace0 \ub85c\uadf8 \ub370\uc774\ud130\uac00 \ub9ce\uc774 \uc313\uc778\ub2e4\uba74 \uc774\ub97c \uc774\uc6a9\ud574 \uc544\uc774\ud15c \uae30\ubc18 \ud639\uc740 \uc720\uc800 \uae30\ubc18\uc758 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4. \uc774\ub54c \ubaa8\ub378\uc740 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"deep-learning"},"Deep Learning"),(0,i.kt)("p",null,"\uac1c\uc778\ud654 \ucd94\ucc9c\uc5d0 \ub300\ud55c \uc694\uad6c\uac00 \ub354 \ucee4\uc9c0\uace0 \ub354 \uc88b\uc740 \uc131\ub2a5\uc744 \ub0b4\ub294 \ubaa8\ub378\uc744 \ud544\uc694\ud574\uc9c8 \uacbd\uc6b0 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. 
\uc774\ub54c \ub9cc\ub4dc\ub294 \ubaa8\ub378\uc740 \uba38\uc2e0\ub7ec\ub2dd\uacfc \uac19\uc774 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"graph",src:n(2994).Z,width:"752",height:"582"})),(0,i.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc744 x\ucd95\uc744 \ubaa8\ub378\uc758 \ubcf5\uc7a1\ub3c4, y\ucd95\uc744 \ubaa8\ub378\uc758 \uc131\ub2a5\uc73c\ub85c \ub450\uace0 \uadf8\ub798\ud504\ub85c \ud45c\ud604\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf5\uc7a1\ub3c4\uac00 \uc62c\ub77c\uac08 \ub54c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc62c\ub77c\uac00\ub294 \uc0c1\uc2b9 \uad00\uacc4\ub97c \uac16\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ub525\ub7ec\ub2dd\uc73c\ub85c \ub118\uc5b4\uac08 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc774 \uc0c8\ub85c \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ub9cc\uc57d \uad00\ub9ac\ud574\uc57c\ud560 \ubaa8\ub378\uc774 \uc801\ub2e4\uba74 \uc11c\ub85c \ud611\uc5c5\uc744 \ud1b5\ud574\uc11c \ucda9\ubd84\ud788 \ud574\uacb0\ud560 \uc218 \uc788\uc9c0\ub9cc \uac1c\ubc1c\ud574\uc57c \ud560 \ubaa8\ub378\uc774 \ub9ce\uc544\uc9c4\ub2e4\uba74 DevOps\uc758 \uacbd\uc6b0\uc640 \uac19\uc774 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \ub098\ud0c0\ub098\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"DevOps\uc758 \ubaa9\ud45c\uc640 \ub9de\ucdb0\uc11c \uc0dd\uac01\ud574\ubcf4\uba74 MLOps\uc758 \ubaa9\ud45c\ub294 \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud14c\uc2a4\ud2b8\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc774 DevOps\uc758 \ubaa9\ud45c\uc600\ub2e4\uba74, MLOps\uc758 \ubaa9\ud45c\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \ucd5c\uadfc \ub098\uc624\uace0 \uc788\ub294 MLOps \uad00\ub828 \uc81c\ud488\uacfc \uc124\uba85\uc744 \ubcf4\uba74 \uaf2d \uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ubaa9\ud45c\ub9cc\uc744 \ub300\uc0c1\uc73c\ub85c \ud558\uace0 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc5b4\ub5a4 \uacbd\uc6b0\uc5d0\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \ub9cc\ub4e0 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub824\uace0 \ud569\ub2c8\ub2e4. \uc774\ub7ec\ud55c \ub2c8\uc988\ub294 \ucd5c\uadfc \uba38\uc2e0\ub7ec\ub2dd \ud504\ub85c\uc81d\ud2b8\uac00 \uc9c4\ud589\ub418\ub294 \uacfc\uc815\uc5d0\uc11c \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc758 \uacbd\uc6b0 \uc6b4\uc601\uc5d0\uc11c \uac04\ub2e8\ud55c \ubaa8\ub378\ubd80\ud130 \uc2dc\uc791\ud574 \uc6b4\uc601\ud560 \uc218 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc790\uc5f0\uc5b4, \uc774\ubbf8\uc9c0\uc640 \uac19\uc740 \uacf3\uc5d0\uc11c\ub294 \uaddc\uce59 \uae30\ubc18\uc758 \ubaa8\ub378\ubcf4\ub2e4\ub294 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud574 \uc8fc\uc5b4\uc9c4 \ud0dc\uc2a4\ud06c\ub97c \ud574\uacb0\ud560 \uc218 \uc788\ub294\uc9c0 \uac80\uc99d(POC)\ub97c \uc120\ud589\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uac80\uc99d\uc774 \ub05d\ub09c \ud504\ub85c\uc81d\ud2b8\ub294 \uc774\uc81c \uc11c\ube44\uc2a4\ub97c \uc704\ud55c \uc6b4\uc601 \ud658\uacbd\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uba38\uc2e0\ub7ec\ub2dd \ud300 \ub0b4\uc758 \uc790\uccb4 \uc5ed\ub7c9\uc73c\ub85c\ub294 \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc27d\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud55c \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"3-\uacb0\ub860"},"3) \uacb0\ub860"),(0,i.kt)("p",null,"\uc694\uc57d\ud558\uc790\uba74 MLOps\ub294 \ub450 \uac00\uc9c0 \ubaa9\ud45c\uac00 \uc788\uc2b5\ub2c8\ub2e4.\n\uc55e\uc5d0\uc11c \uc124\uba85\ud55c MLOps\ub294 ML+Ops \ub85c \ub450 \ud300\uc758 \uc0dd\uc0b0\uc131 \ud5a5\uc0c1\uc744 \uc704\ud55c \uac83\uc774\uc600\uc2b5\ub2c8\ub2e4.\n\ubc18\uba74, \ub4a4\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc740 ML->Ops \ub85c \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 \uac83\uc744 \ub9d0\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},7430:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},2994:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},6651:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2416],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var a=r.createContext({}),s=function(e){var t=r.useContext(a),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(a.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,l=e.originalType,a=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),d=s(n),f=i,m=d["".concat(a,".").concat(f)]||d[f]||u[f]||l;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var l=n.length,o=new Array(l);o[0]=f;var p={};for(var a in t)hasOwnProperty.call(t,a)&&(p[a]=t[a]);p.originalType=e,p[d]="string"==typeof e?e:i,o[1]=p;for(var 
s=2;s{n.r(t),n.d(t,{assets:()=>a,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var r=n(7462),i=(n(7294),n(3905));const l={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"introduction/intro",id:"introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/docs/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/docs/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. Levels of MLOps",permalink:"/docs/introduction/levels"}},a={},s=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML+Ops",id:"1-mlops",level:3},{value:"Rule Based",id:"rule-based",level:4},{value:"Machine Learning",id:"machine-learning",level:4},{value:"Deep Learning",id:"deep-learning",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) \uacb0\ub860",id:"3-\uacb0\ub860",level:3}],c={toc:s},d="wrapper";function u(e){let{components:t,...l}=e;return(0,i.kt)(d,(0,r.Z)({},c,l,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,i.kt)("p",null,"2012\ub144 Alexnet \uc774\ud6c4 CV, NLP\ub97c \ube44\ub86f\ud558\uc5ec \ub370\uc774\ud130\uac00 \uc874\uc7ac\ud558\ub294 \ub3c4\uba54\uc778\uc774\ub77c\uba74 \uc5b4\ub514\uc11c\ub4e0 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \ub3c4\uc785\ud558\uace0\uc790 \ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc740 AI\ub77c\ub294 \ub2e8\uc5b4\ub85c \ubb36\uc774\uba70 \ubd88\ub838\uace0 \ub9ce\uc740 \ub9e4\uccb4\uc5d0\uc11c AI\uc758 \ud544\uc694\uc131\uc744 \uc678\ucce4\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \ubb34\uc218\ud788 \ub9ce\uc740 \uae30\uc5c5\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc218\ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\ub97c \uc9c4\ud589\ud558\uc600\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uadf8 \uacb0\uacfc\ub294 \uc5b4\ub5bb\uac8c \ub418\uc5c8\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uc5d8\ub9ac\uba3c\ud2b8 AI\uc758 \uc74c\ubcd1\ucc2c \ub3d9\ubd81\uc544 \uc9c0\uc5ed \ucd1d\uad04\ucc45\uc784\uc790\ub294 ",(0,i.kt)("a",{parentName:"p",href:"https://zdnet.co.kr/view/?no=20200611062002"},(0,i.kt)("em",{parentName:"a"},'"10\uac1c \uae30\uc5c5\uc5d0 AI \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud55c\ub2e4\uba74 \uadf8\uc911 9\uac1c\ub294 \ucee8\uc149\uac80\uc99d(POC)\ub9cc \ud558\ub2e4 \ub05d\ub09c\ub2e4"')),"\uace0 \ub9d0\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ucc98\ub7fc \ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uc774 \ubb38\uc81c\ub97c \ud480 \uc218 \uc788\uc744 \uac83 \uac19\ub2e4\ub294 \uac00\ub2a5\uc131\ub9cc\uc744 \ubcf4\uc5ec\uc8fc\uace0 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \uc774 \uc2dc\uae30\ucbe4\uc5d0 ",(0,i.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI\uc5d0 \ub2e4\uc2dc \uaca8\uc6b8"),"\uc774 \ub2e4\uac00\uc624\uace0 \uc788\ub2e4\ub294 \uc804\ub9dd\ub3c4 \ub098\uc624\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc65c \ud504\ub85c\uc81d\ud2b8 \ub300\ubd80\ubd84\uc774 \ucee8\uc149\uac80\uc99d(POC) \ub2e8\uacc4\uc5d0\uc11c \ub05d\ub0ac\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd \ucf54\ub4dc\ub9cc\uc73c\ub85c\ub294 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub97c \uc6b4\uc601\ud560 \uc218 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc2e4\uc81c \uc11c\ube44\uc2a4 \ub2e8\uacc4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc758 \ucf54\ub4dc\uac00 \ucc28\uc9c0\ud558\ub294 \ubd80\ubd84\uc740 \uc0dd\uac01\ubcf4\ub2e4 \ud06c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ub2e8\uc21c\ud788 \ubaa8\ub378\uc758 \uc131\ub2a5\ub9cc\uc774 \uc544\ub2cc \ub2e4\ub978 \ub9ce\uc740 \ubd80\ubd84\uc744 \uace0\ub824\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uad6c\uae00\uc740 \uc774\ub7f0 \ubb38\uc81c\ub97c 2015\ub144 ",(0,i.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),"\uc5d0\uc11c \uc9c0\uc801\ud55c \ubc14 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uc774 \ub17c\ubb38\uc774 \ub098\uc62c \ub2f9\uc2dc\uc5d0\ub294 \uc544\uc9c1 \ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc758 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud558\uae30 \ubc14\uc05c \uc2dc\uae30\uc600\uae30 \ub54c\ubb38\uc5d0, \ub17c\ubb38\uc774 \uc9c0\uc801\ud558\ub294 \ubc14\uc5d0 \ub9ce\uc740 \uc8fc\uc758\ub97c \uae30\uc6b8\uc774\uc9c0\ub294 \uc54a\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 \uba87 \ub144\uc774 \uc9c0\ub09c \ud6c4 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud574\ub0b4\uc5b4, \uc774\uc81c \uc0ac\ub78c\ub4e4\uc740 \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0 \uc801\uc6a9\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uace7 \ub9ce\uc740 \uc0ac\ub78c\uc774 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub294 \uc27d\uc9c0 \uc54a\ub2e4\ub294 \uac83\uc744 
\uae68\ub2ec\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"devops"},"Devops"),(0,i.kt)("p",null,"MLOps\ub294 \uc774\uc804\uc5d0 \uc5c6\ub358 \uc0c8\ub85c\uc6b4 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c DevOps\ub77c\uace0 \ubd88\ub9ac\ub294 \uac1c\ubc1c \ubc29\ubc95\ub860\uc5d0\uc11c \ud30c\uc0dd\ub41c \ub2e8\uc5b4\uc785\ub2c8\ub2e4. \uadf8\ub807\uae30\uc5d0 DevOps\ub97c \uc774\ud574\ud55c\ub2e4\uba74 MLOps\ub97c \uc774\ud574\ud558\ub294 \ub370 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"devops-1"},"DevOps"),(0,i.kt)("p",null,"DevOps\ub294 Development(\uac1c\ubc1c)\uc640 Operations(\uc6b4\uc601)\uc758 \ud569\uc131\uc5b4\ub85c \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc758 \uac1c\ubc1c(Development)\uacfc \uc6b4\uc601(Operations)\uc758 \ud569\uc131\uc5b4\ub85c\uc11c \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c\uc790\uc640 \uc815\ubcf4\uae30\uc220 \uc804\ubb38\uac00 \uac04\uc758 \uc18c\ud1b5, \ud611\uc5c5 \ubc0f \ud1b5\ud569\uc744 \uac15\uc870\ud558\ub294 \uac1c\ubc1c \ud658\uacbd\uc774\ub098 \ubb38\ud654\ub97c \ub9d0\ud569\ub2c8\ub2e4.\nDevOps\uc758 \ubaa9\uc801\uc740 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \uc870\uc9c1\uacfc \uc6b4\uc601 \uc870\uc9c1\uac04\uc758 \uc0c1\ud638 \uc758\uc874\uc801 \ub300\uc751\uc774\uba70 \uc870\uc9c1\uc774 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uacfc \uc11c\ube44\uc2a4\ub97c \ube60\ub978 \uc2dc\uac04\uc5d0 \uac1c\ubc1c \ubc0f \ubc30\ud3ec\ud558\ub294 \uac83\uc744 \ubaa9\uc801\uc73c\ub85c \ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,i.kt)("p",null,"\uadf8\ub7fc \uac04\ub2e8\ud55c \uc0c1\ud669 \uc124\uba85\uc744 \ud1b5\ud574 DevOps\uac00 \uc65c \ud544\uc694\ud55c\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uc9c0\uc6d0\ud558\ub294 \uae30\ub2a5\uc774 \ub9ce\uc9c0 \uc54a\uc73c\uba70 \ud300 \ub610\ub294 \ud68c\uc0ac\uc758 \uaddc\ubaa8\uac00 \uc791\uc2b5\ub2c8\ub2e4. \uc774\ub54c\uc5d0\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uad6c\ubd84\uc774 \uc5c6\uac70\ub098 \uc791\uc740 \uaddc\ubaa8\uc758 \ud300\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \ud575\uc2ec\uc740 \uaddc\ubaa8\uac00 \uc791\ub2e4\ub294 \uac83\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub54c\ub294 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \uc811\uc810\uc774 \ub9ce\uace0, \uc9d1\uc911\ud574\uc57c \ud558\ub294 \uc11c\ube44\uc2a4\uac00 \uc801\uae30 \ub54c\ubb38\uc5d0 \ube60\ub974\uac8c \uc11c\ube44\uc2a4\ub97c \uac1c\uc120\ud574 \ub098\uac08 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \ucee4\uc9c8\uc218\ub85d \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc740 \ubd84\ub9ac\ub418\uace0 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \ucc44\ub110\uc758 \ubb3c\ub9ac\uc801\uc778 \ud55c\uacc4\uac00 \uc624\uac8c \ub429\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\ub978 \ud300\uacfc \ud568\uaed8\ud558\ub294 \ubbf8\ud305\uc5d0 \ud300\uc6d0 \uc804\uccb4\uac00 \ubbf8\ud305\uc744 \ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c \uac01 \ud300\uc758 \ud300\uc7a5 \ud639\uc740 \uc18c\uc218\uc758 \uc2dc\ub2c8\uc5b4\ub9cc \ucc38\uc11d\ud558\uc5ec \ubbf8\ud305\uc744 \uc9c4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7f0 \uc18c\ud1b5 \ucc44\ub110\uc758 \ud55c\uacc4\ub294 \ud544\uc5f0\uc801\uc73c\ub85c \uc18c\ud1b5\uc758 \ubd80\uc7ac\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ub2e4 \ubcf4\uba74 \uac1c\ubc1c\ud300\uc740 \uc0c8\ub85c\uc6b4 \uae30\ub2a5\ub4e4\uc744 \uacc4\uc18d\ud574\uc11c \uac1c\ubc1c\ud558\uace0 \uc6b4\uc601\ud300 \uc785\uc7a5\uc5d0\uc11c\ub294 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \ubc30\ud3ec \uc2dc \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\ub294 \ub4f1 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc704\uc640 \uac19\uc740 \uc0c1\ud669\uc774 \ubc18\ubcf5\ub418\uba74 \uc870\uc9c1 \uc774\uae30\uc8fc\uc758\ub77c\uace0 \ubd88\ub9ac\ub294 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"silo",src:n(6651).Z,width:"892",height:"498"})),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"\uc0ac\uc77c\ub85c(silo)\ub294 \uace1\uc2dd\uc774\ub098 \uc0ac\ub8cc\ub97c \uc800\uc7a5\ud558\ub294 \uad74\ub69d \ubaa8\uc591\uc758 \ucc3d\uace0\ub97c \uc758\ubbf8\ud55c\ub2e4. \uc0ac\uc77c\ub85c\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc874\uc7ac\ud558\uba70 \uc800\uc7a5\ub418\ub294 \ubb3c\ud488\uc774 \uc11c\ub85c \uc11e\uc774\uc9c0 \uc54a\ub3c4\ub85d \ucca0\uc800\ud788 \uad00\ub9ac\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc900\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc0ac\uc77c\ub85c \ud6a8\uacfc(Organizational Silos Effect)\ub294 \uc870\uc9c1 \ubd80\uc11c \uac04\uc5d0 \uc11c\ub85c \ud611\ub825\ud558\uc9c0 \uc54a\uace0 \ub0b4\ubd80 \uc774\uc775\ub9cc\uc744 \ucd94\uad6c\ud558\ub294 \ud604\uc0c1\uc744 \uc758\ubbf8\ud55c\ub2e4. \uc870\uc9c1 \ub0b4\uc5d0\uc11c \uac1c\ubcc4 \ubd80\uc11c\ub07c\ub9ac \uc11c\ub85c \ub2f4\uc744 \uc313\uace0 \uac01\uc790\uc758 \uc774\uc775\uc5d0\ub9cc \ubab0\ub450\ud558\ub294 \ubd80\uc11c \uc774\uae30\uc8fc\uc758\ub97c \uc77c\uceeb\ub294\ub2e4.")),(0,i.kt)("p",null,"\uc0ac\uc77c\ub85c \ud604\uc0c1\uc740 \uc11c\ube44\uc2a4 \ud488\uc9c8\uc758 \uc800\ud558\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7ec\ud55c \uc0ac\uc77c\ub85c \ud604\uc0c1\uc744 \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub098\uc628 \uac83\uc774 \ubc14\ub85c DevOps\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"cicd"},"CI/CD"),(0,i.kt)("p",null,"Continuous Integration(CI) \uc640 Continuous Delivery (CD)\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uc7a5\ubcbd\uc744 \ud574\uc81c\ud558\uae30 \uc704\ud55c \uad6c\uccb4\uc801\uc778 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"cicd",src:n(7430).Z,width:"1400",height:"299"})),(0,i.kt)("p",null,"\uc774 \ubc29\ubc95\uc744 \ud1b5\ud574\uc11c \uac1c\ubc1c\ud300\uc5d0\uc11c\ub294 \uc6b4\uc601\ud300\uc758 \ud658\uacbd\uc744 \uc774\ud574\ud558\uace0 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c \uc911\uc778 \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uae4c\uc9c0 \uc774\uc5b4\uc9c8 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4. 
\uc6b4\uc601\ud300\uc740 \uac80\uc99d\ub41c \uae30\ub2a5 \ub610\ub294 \uac1c\uc120\ub41c \uc81c\ud488\uc744 \ub354 \uc790\uc8fc \ubc30\ud3ec\ud574 \uace0\uac1d\uc758 \uc81c\ud488 \uacbd\ud5d8\uc744 \uc0c1\uc2b9\uc2dc\ud0b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \uc885\ud569\ud558\uc790\uba74 DevOps\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300 \uac04\uc758 \ubb38\uc81c\uac00 \uc788\uc5c8\uace0 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\ub860\uc785\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"mlops"},"MLOps"),(0,i.kt)("h3",{id:"1-mlops"},"1) ML+Ops"),(0,i.kt)("p",null,"MLOps\ub294 Machine Learning \uacfc Operations\uc758 \ud569\uc131\uc5b4\ub85c DevOps\uc5d0\uc11c Dev\uac00 ML\ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc81c \uc55e\uc5d0\uc11c \uc0b4\ud3b4\ubcf8 DevOps\ub97c \ud1b5\ud574 MLOps\uac00 \ubb34\uc5c7\uc778\uc9c0 \uc9d0\uc791\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\u201cMLOps\ub294 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc758 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\uc785\ub2c8\ub2e4.\u201d\n\uc774 \ub9d0\uc740 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300 \uc0ac\uc774\uc5d0 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\ub2e4\ub294 \uc758\ubbf8\uc785\ub2c8\ub2e4. \uadf8\ub7fc \uc65c \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc5d0\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\uc744\uae4c\uc694? \ub450 \ud300 \uac04\uc758 \ubb38\uc81c\ub97c \uc54c\uc544\ubcf4\uae30 \uc704\ud574\uc11c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \uc608\uc2dc\ub85c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"rule-based"},"Rule Based"),(0,i.kt)("p",null,"\ucc98\uc74c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \ub9cc\ub4dc\ub294 \uacbd\uc6b0 \uac04\ub2e8\ud55c \uaddc\uce59\uc744 \uae30\ubc18\uc73c\ub85c \uc544\uc774\ud15c\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c 1\uc8fc\uc77c\uac04 \ud310\ub9e4\ub7c9\uc774 \uac00\uc7a5 \ub9ce\uc740 \uc21c\uc11c\ub300\ub85c \ubcf4\uc5ec\uc8fc\ub294 \uc2dd\uc758 \ubc29\uc2dd\uc744 \uc774\uc6a9\ud569\ub2c8\ub2e4. \uc774 \ubc29\uc2dd\uc73c\ub85c \ubaa8\ub378\uc744 \uc815\ud55c\ub2e4\uba74 \ud2b9\ubcc4\ud55c \uc774\uc720\uac00 \uc5c6\ub294 \uc774\uc0c1 \ubaa8\ub378\uc758 \uc218\uc815\uc774 \ud544\uc694 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"machine-learning"},"Machine Learning"),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \uc870\uae08 \ucee4\uc9c0\uace0 \ub85c\uadf8 \ub370\uc774\ud130\uac00 \ub9ce\uc774 \uc313\uc778\ub2e4\uba74 \uc774\ub97c \uc774\uc6a9\ud574 \uc544\uc774\ud15c \uae30\ubc18 \ud639\uc740 \uc720\uc800 \uae30\ubc18\uc758 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4. \uc774\ub54c \ubaa8\ub378\uc740 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"deep-learning"},"Deep Learning"),(0,i.kt)("p",null,"\uac1c\uc778\ud654 \ucd94\ucc9c\uc5d0 \ub300\ud55c \uc694\uad6c\uac00 \ub354 \ucee4\uc9c0\uace0 \ub354 \uc88b\uc740 \uc131\ub2a5\uc744 \ub0b4\ub294 \ubaa8\ub378\uc744 \ud544\uc694\ud574\uc9c8 \uacbd\uc6b0 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. 
\uc774\ub54c \ub9cc\ub4dc\ub294 \ubaa8\ub378\uc740 \uba38\uc2e0\ub7ec\ub2dd\uacfc \uac19\uc774 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"graph",src:n(2994).Z,width:"752",height:"582"})),(0,i.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc744 x\ucd95\uc744 \ubaa8\ub378\uc758 \ubcf5\uc7a1\ub3c4, y\ucd95\uc744 \ubaa8\ub378\uc758 \uc131\ub2a5\uc73c\ub85c \ub450\uace0 \uadf8\ub798\ud504\ub85c \ud45c\ud604\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf5\uc7a1\ub3c4\uac00 \uc62c\ub77c\uac08 \ub54c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc62c\ub77c\uac00\ub294 \uc0c1\uc2b9 \uad00\uacc4\ub97c \uac16\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ub525\ub7ec\ub2dd\uc73c\ub85c \ub118\uc5b4\uac08 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc774 \uc0c8\ub85c \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ub9cc\uc57d \uad00\ub9ac\ud574\uc57c\ud560 \ubaa8\ub378\uc774 \uc801\ub2e4\uba74 \uc11c\ub85c \ud611\uc5c5\uc744 \ud1b5\ud574\uc11c \ucda9\ubd84\ud788 \ud574\uacb0\ud560 \uc218 \uc788\uc9c0\ub9cc \uac1c\ubc1c\ud574\uc57c \ud560 \ubaa8\ub378\uc774 \ub9ce\uc544\uc9c4\ub2e4\uba74 DevOps\uc758 \uacbd\uc6b0\uc640 \uac19\uc774 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \ub098\ud0c0\ub098\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"DevOps\uc758 \ubaa9\ud45c\uc640 \ub9de\ucdb0\uc11c \uc0dd\uac01\ud574\ubcf4\uba74 MLOps\uc758 \ubaa9\ud45c\ub294 \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud14c\uc2a4\ud2b8\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc774 DevOps\uc758 \ubaa9\ud45c\uc600\ub2e4\uba74, MLOps\uc758 \ubaa9\ud45c\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \ucd5c\uadfc \ub098\uc624\uace0 \uc788\ub294 MLOps \uad00\ub828 \uc81c\ud488\uacfc \uc124\uba85\uc744 \ubcf4\uba74 \uaf2d \uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ubaa9\ud45c\ub9cc\uc744 \ub300\uc0c1\uc73c\ub85c \ud558\uace0 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc5b4\ub5a4 \uacbd\uc6b0\uc5d0\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \ub9cc\ub4e0 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub824\uace0 \ud569\ub2c8\ub2e4. \uc774\ub7ec\ud55c \ub2c8\uc988\ub294 \ucd5c\uadfc \uba38\uc2e0\ub7ec\ub2dd \ud504\ub85c\uc81d\ud2b8\uac00 \uc9c4\ud589\ub418\ub294 \uacfc\uc815\uc5d0\uc11c \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc758 \uacbd\uc6b0 \uc6b4\uc601\uc5d0\uc11c \uac04\ub2e8\ud55c \ubaa8\ub378\ubd80\ud130 \uc2dc\uc791\ud574 \uc6b4\uc601\ud560 \uc218 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc790\uc5f0\uc5b4, \uc774\ubbf8\uc9c0\uc640 \uac19\uc740 \uacf3\uc5d0\uc11c\ub294 \uaddc\uce59 \uae30\ubc18\uc758 \ubaa8\ub378\ubcf4\ub2e4\ub294 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud574 \uc8fc\uc5b4\uc9c4 \ud0dc\uc2a4\ud06c\ub97c \ud574\uacb0\ud560 \uc218 \uc788\ub294\uc9c0 \uac80\uc99d(POC)\ub97c \uc120\ud589\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uac80\uc99d\uc774 \ub05d\ub09c \ud504\ub85c\uc81d\ud2b8\ub294 \uc774\uc81c \uc11c\ube44\uc2a4\ub97c \uc704\ud55c \uc6b4\uc601 \ud658\uacbd\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uba38\uc2e0\ub7ec\ub2dd \ud300 \ub0b4\uc758 \uc790\uccb4 \uc5ed\ub7c9\uc73c\ub85c\ub294 \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc27d\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud55c \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"3-\uacb0\ub860"},"3) \uacb0\ub860"),(0,i.kt)("p",null,"\uc694\uc57d\ud558\uc790\uba74 MLOps\ub294 \ub450 \uac00\uc9c0 \ubaa9\ud45c\uac00 \uc788\uc2b5\ub2c8\ub2e4.\n\uc55e\uc5d0\uc11c \uc124\uba85\ud55c MLOps\ub294 ML+Ops \ub85c \ub450 \ud300\uc758 \uc0dd\uc0b0\uc131 \ud5a5\uc0c1\uc744 \uc704\ud55c \uac83\uc774\uc600\uc2b5\ub2c8\ub2e4.\n\ubc18\uba74, \ub4a4\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc740 ML->Ops \ub85c \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 \uac83\uc744 \ub9d0\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},7430:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},2994:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},6651:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file diff --git a/assets/js/0425fa84.ff62a655.js b/assets/js/0425fa84.fa9e4732.js similarity index 98% rename from assets/js/0425fa84.ff62a655.js rename to assets/js/0425fa84.fa9e4732.js index 1561fc77..13ae2b52 100644 --- a/assets/js/0425fa84.ff62a655.js +++ b/assets/js/0425fa84.fa9e4732.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5384],{3905:(e,t,r)=>{r.d(t,{Zo:()=>s,kt:()=>m});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},s=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),u=p(r),k=o,m=u["".concat(c,".").concat(k)]||u[k]||d[k]||a;return r?n.createElement(m,i(i({ref:t},s),{},{components:r})):n.createElement(m,i({ref:t},s))}));function m(e,t){var 
r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=k;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/docker",id:"version-1.0/prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/docs/1.0/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/docker.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/docs/1.0/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/docs/1.0/prerequisites/docker/command"}},c={},p=[{value:"\ucee8\ud14c\uc774\ub108",id:"\ucee8\ud14c\uc774\ub108",level:2},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:2},{value:"Layer \ud574\uc11d",id:"layer-\ud574\uc11d",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],s={toc:p},u="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"\ucee8\ud14c\uc774\ub108"},"\ucee8\ud14c\uc774\ub108"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud558\uac8c \uc2e4\ud589\ud558\ub294 \uae30\uc220"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \ud30c\uc77c\ub4e4\uc758 \uc9d1\ud569"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub780?",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uae30\ubc18\uc73c\ub85c \uc2e4\ud589\ub41c \ud55c \uac1c\uc758 \ud504\ub85c\uc138\uc2a4"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0\ub85c \ucc0d\uc5b4\ub0b8 \ubd95\uc5b4\ube75")))),(0,o.kt)("h2",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,o.kt)("p",null,"\ub3c4\ucee4\ub294 ",(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108\ub97c \uad00\ub9ac"),"\ud558\uace0 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574\uc8fc\ub294 \ud50c\ub7ab\ud3fc\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ub3c4\ucee4\uc758 \uc2ac\ub85c\uac74\uc740 \ubc14\ub85c ",(0,o.kt)("strong",{parentName:"p"},"Build Once, Run Anywhere")," \ub85c \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud55c \uc2e4\ud589 \uacb0\uacfc\ub97c 
\ubcf4\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub3c4\ucee4 \ub0b4\ubd80\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \uacfc\uc815\uc744 \ubcf4\uc790\uba74 \uc2e4\uc81c\ub85c container \ub97c \uc704\ud55c \ub9ac\uc18c\uc2a4\ub97c \ubd84\ub9ac\ud558\uace0, lifecycle \uc744 \uc81c\uc5b4\ud558\ub294 \uae30\ub2a5\uc740 linux kernel \uc758 cgroup \ub4f1\uc774 \uc218\ud589\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc774\ub7ec\ud55c \uc778\ud130\ud398\uc774\uc2a4\ub97c \ubc14\ub85c \uc0ac\uc6a9\ud558\ub294 \uac83\uc740 ",(0,o.kt)("strong",{parentName:"p"},"\ub108\ubb34 \uc5b4\ub835\uae30 \ub54c\ubb38\uc5d0")," \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\uc0c1\ud654 layer\ub97c \ub9cc\ub4e4\uac8c \ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(8753).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"\uc774\ub97c \ud1b5\ud574 \uc0ac\uc6a9\uc790\ub294 \uc0ac\uc6a9\uc790 \uce5c\ud654\uc801\uc778 API \uc778 ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI")," \ub9cc\uc73c\ub85c \uc27d\uac8c \ucee8\ud14c\uc774\ub108\ub97c \uc81c\uc5b4\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"layer-\ud574\uc11d"},"Layer \ud574\uc11d"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub098\uc628 layer\ub4e4\uc758 \uc5ed\ud560\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: linux kernel \uc758 \uae30\ub2a5\uc744 \uc9c1\uc811 \uc0ac\uc6a9\ud574\uc11c, container \ub77c\ub294 \ud558\ub098\uc758 \ud504\ub85c\uc138\uc2a4\uac00 \uc0ac\uc6a9\ud560 \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc640 cpu, memory, filesystem \ub4f1\uc744 \uaca9\ub9ac\uc2dc\ucf1c\uc8fc\ub294 \uae30\ub2a5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"containerd: runC(OCI layer) \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30 \uc704\ud55c \ucd94\uc0c1\ud654 \ub2e8\uacc4\uc774\uba70, \ud45c\uc900\ud654\ub41c \uc778\ud130\ud398\uc774\uc2a4(OCI)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"dockerd: containerd \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\ub294 \uc5ed\ud560\ub9cc \ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"docker cli: \uc0ac\uc6a9\uc790\ub294 docker cli \ub85c dockerd (Docker daemon)\uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30\ub9cc \ud558\uba74 \ub429\ub2c8\ub2e4.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc774 \ud1b5\uc2e0 \uacfc\uc815\uc5d0\uc11c unix socket \uc744 \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \uac00\ub054 \ub3c4\ucee4 \uad00\ub828 \uc5d0\ub7ec\uac00 \ub098\uba74 ",(0,o.kt)("inlineCode",{parentName:"li"},"/var/run/docker.sock")," \uac00 \uc0ac\uc6a9 \uc911\uc774\ub2e4, \uad8c\ud55c\uc774 \uc5c6\ub2e4 \ub4f1\ub4f1\uc758 \uc5d0\ub7ec \uba54\uc2dc\uc9c0\uac00 \ub098\uc624\ub294 \uac83\uc785\ub2c8\ub2e4.")))),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ub3c4\ucee4\ub294 \ub9ce\uc740 \ub2e8\uacc4\ub97c \uac10\uc2f8\uace0 \uc788\uc9c0\ub9cc, \ud754\ud788 \ub3c4\ucee4\ub77c\ub294 \uc6a9\uc5b4\ub97c \uc0ac\uc6a9\ud560 \ub54c\ub294 Docker CLI \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0, Dockerd \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0 Docker Container \ud558\ub098\ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uc5b4\uc11c \ud63c\ub780\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \ub098\uc624\ub294 \uae00\uc5d0\uc11c\ub3c4 \ub3c4\ucee4\uac00 \uc5ec\ub7ec\uac00\uc9c0 \uc758\ubbf8\ub85c \uc4f0\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML 
Engineer"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ub3c4\ucee4\ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\uc720\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ub098\uc758 ML \ud559\uc2b5/\ucd94\ub860 \ucf54\ub4dc\ub97c OS, python version, python \ud658\uacbd, \ud2b9\uc815 python package \ubc84\uc804\uc5d0 independent \ud558\ub3c4\ub85d \ud574\uc57c \ud55c\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uadf8\ub798\uc11c \ucf54\ub4dc \ubfd0\ub9cc\uc774 \uc544\ub2cc ",(0,o.kt)("strong",{parentName:"li"},"\ud574\ub2f9 \ucf54\ub4dc\uac00 \uc2e4\ud589\ub418\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \uc885\uc18d\uc801\uc778 \ud328\ud0a4\uc9c0, \ud658\uacbd \ubcc0\uc218, \ud3f4\ub354\uba85 \ub4f1\ub4f1\uc744 \ud558\ub098\uc758 \ud328\ud0a4\uc9c0\ub85c")," \ubb36\uc744 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ud654 \uae30\uc220\uc774\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uc774 \uae30\uc220\uc744 \uc27d\uac8c \uc0ac\uc6a9\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc911 \ud558\ub098\uac00 \ub3c4\ucee4\uc774\uba70, \ud328\ud0a4\uc9c0\ub97c \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub978\ub2e4.")))}d.isMDXComponent=!0},8753:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5384],{3905:(e,t,r)=>{r.d(t,{Zo:()=>s,kt:()=>m});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},s=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),u=p(r),k=o,m=u["".concat(c,".").concat(k)]||u[k]||d[k]||a;return r?n.createElement(m,i(i({ref:t},s),{},{components:r})):n.createElement(m,i({ref:t},s))}));function m(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=k;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/docker",id:"version-1.0/prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to 
Docker.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/docs/1.0/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/docker.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/docs/1.0/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/docs/1.0/prerequisites/docker/command"}},c={},p=[{value:"\ucee8\ud14c\uc774\ub108",id:"\ucee8\ud14c\uc774\ub108",level:2},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:2},{value:"Layer \ud574\uc11d",id:"layer-\ud574\uc11d",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],s={toc:p},u="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(u,(0,n.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"\ucee8\ud14c\uc774\ub108"},"\ucee8\ud14c\uc774\ub108"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud558\uac8c \uc2e4\ud589\ud558\ub294 \uae30\uc220"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \ud30c\uc77c\ub4e4\uc758 \uc9d1\ud569"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub780?",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uae30\ubc18\uc73c\ub85c \uc2e4\ud589\ub41c \ud55c \uac1c\uc758 \ud504\ub85c\uc138\uc2a4"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0\ub85c \ucc0d\uc5b4\ub0b8 \ubd95\uc5b4\ube75")))),(0,o.kt)("h2",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,o.kt)("p",null,"\ub3c4\ucee4\ub294 ",(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108\ub97c \uad00\ub9ac"),"\ud558\uace0 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574\uc8fc\ub294 \ud50c\ub7ab\ud3fc\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ub3c4\ucee4\uc758 \uc2ac\ub85c\uac74\uc740 \ubc14\ub85c ",(0,o.kt)("strong",{parentName:"p"},"Build Once, Run Anywhere")," \ub85c \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud55c \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub3c4\ucee4 \ub0b4\ubd80\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \uacfc\uc815\uc744 \ubcf4\uc790\uba74 \uc2e4\uc81c\ub85c container \ub97c \uc704\ud55c \ub9ac\uc18c\uc2a4\ub97c \ubd84\ub9ac\ud558\uace0, lifecycle \uc744 \uc81c\uc5b4\ud558\ub294 \uae30\ub2a5\uc740 linux kernel \uc758 cgroup \ub4f1\uc774 \uc218\ud589\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc774\ub7ec\ud55c \uc778\ud130\ud398\uc774\uc2a4\ub97c \ubc14\ub85c \uc0ac\uc6a9\ud558\ub294 \uac83\uc740 ",(0,o.kt)("strong",{parentName:"p"},"\ub108\ubb34 \uc5b4\ub835\uae30 \ub54c\ubb38\uc5d0")," \ub2e4\uc74c\uacfc \uac19\uc740 
\ucd94\uc0c1\ud654 layer\ub97c \ub9cc\ub4e4\uac8c \ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(8753).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"\uc774\ub97c \ud1b5\ud574 \uc0ac\uc6a9\uc790\ub294 \uc0ac\uc6a9\uc790 \uce5c\ud654\uc801\uc778 API \uc778 ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI")," \ub9cc\uc73c\ub85c \uc27d\uac8c \ucee8\ud14c\uc774\ub108\ub97c \uc81c\uc5b4\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"layer-\ud574\uc11d"},"Layer \ud574\uc11d"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub098\uc628 layer\ub4e4\uc758 \uc5ed\ud560\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: linux kernel \uc758 \uae30\ub2a5\uc744 \uc9c1\uc811 \uc0ac\uc6a9\ud574\uc11c, container \ub77c\ub294 \ud558\ub098\uc758 \ud504\ub85c\uc138\uc2a4\uac00 \uc0ac\uc6a9\ud560 \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc640 cpu, memory, filesystem \ub4f1\uc744 \uaca9\ub9ac\uc2dc\ucf1c\uc8fc\ub294 \uae30\ub2a5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"containerd: runC(OCI layer) \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30 \uc704\ud55c \ucd94\uc0c1\ud654 \ub2e8\uacc4\uc774\uba70, \ud45c\uc900\ud654\ub41c \uc778\ud130\ud398\uc774\uc2a4(OCI)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"dockerd: containerd \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\ub294 \uc5ed\ud560\ub9cc \ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"docker cli: \uc0ac\uc6a9\uc790\ub294 docker cli \ub85c dockerd (Docker daemon)\uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30\ub9cc \ud558\uba74 \ub429\ub2c8\ub2e4.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc774 \ud1b5\uc2e0 \uacfc\uc815\uc5d0\uc11c unix socket \uc744 \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \uac00\ub054 \ub3c4\ucee4 \uad00\ub828 \uc5d0\ub7ec\uac00 \ub098\uba74 ",(0,o.kt)("inlineCode",{parentName:"li"},"/var/run/docker.sock")," \uac00 \uc0ac\uc6a9 \uc911\uc774\ub2e4, \uad8c\ud55c\uc774 \uc5c6\ub2e4 \ub4f1\ub4f1\uc758 \uc5d0\ub7ec \uba54\uc2dc\uc9c0\uac00 \ub098\uc624\ub294 \uac83\uc785\ub2c8\ub2e4.")))),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ub3c4\ucee4\ub294 \ub9ce\uc740 \ub2e8\uacc4\ub97c \uac10\uc2f8\uace0 \uc788\uc9c0\ub9cc, \ud754\ud788 \ub3c4\ucee4\ub77c\ub294 \uc6a9\uc5b4\ub97c \uc0ac\uc6a9\ud560 \ub54c\ub294 Docker CLI \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0, Dockerd \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0 Docker Container \ud558\ub098\ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uc5b4\uc11c \ud63c\ub780\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \ub098\uc624\ub294 \uae00\uc5d0\uc11c\ub3c4 \ub3c4\ucee4\uac00 \uc5ec\ub7ec\uac00\uc9c0 \uc758\ubbf8\ub85c \uc4f0\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ub3c4\ucee4\ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\uc720\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ub098\uc758 ML \ud559\uc2b5/\ucd94\ub860 \ucf54\ub4dc\ub97c OS, python version, python \ud658\uacbd, \ud2b9\uc815 python package \ubc84\uc804\uc5d0 independent \ud558\ub3c4\ub85d \ud574\uc57c \ud55c\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uadf8\ub798\uc11c \ucf54\ub4dc \ubfd0\ub9cc\uc774 \uc544\ub2cc ",(0,o.kt)("strong",{parentName:"li"},"\ud574\ub2f9 \ucf54\ub4dc\uac00 \uc2e4\ud589\ub418\uae30 
\uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \uc885\uc18d\uc801\uc778 \ud328\ud0a4\uc9c0, \ud658\uacbd \ubcc0\uc218, \ud3f4\ub354\uba85 \ub4f1\ub4f1\uc744 \ud558\ub098\uc758 \ud328\ud0a4\uc9c0\ub85c")," \ubb36\uc744 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ud654 \uae30\uc220\uc774\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uc774 \uae30\uc220\uc744 \uc27d\uac8c \uc0ac\uc6a9\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc911 \ud558\ub098\uac00 \ub3c4\ucee4\uc774\uba70, \ud328\ud0a4\uc9c0\ub97c \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub978\ub2e4.")))}d.isMDXComponent=!0},8753:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file diff --git a/assets/js/089e5a41.71e49dd8.js b/assets/js/089e5a41.3bb68be8.js similarity index 99% rename from assets/js/089e5a41.71e49dd8.js rename to assets/js/089e5a41.3bb68be8.js index 79f9e9ff..24a1f666 100644 --- a/assets/js/089e5a41.71e49dd8.js +++ b/assets/js/089e5a41.3bb68be8.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2949],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},m=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=p(a),k=r,d=u["".concat(s,".").concat(k)]||u[k]||c[k]||l;return a?n.createElement(d,o(o({ref:t},m),{},{components:a})):n.createElement(d,o({ref:t},m))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:r,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"setup-kubernetes/install-kubernetes-module",title:"5. 
Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/docs/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/docs/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. (Optional) Setup GPU",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],m={toc:p},u="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(u,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub4c8\uc744 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uacfc\uc815\uc5d0 \uad00\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \uc18c\uac1c\ub418\ub294 \uacfc\uc815\uc740 \ubaa8\ub450 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0\uc11c \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"helm"},"Helm"),(0,r.kt)("p",null,"Helm\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud328\ud0a4\uc9c0\uc640 \uad00\ub828\ub41c \uc790\uc6d0\uc744 \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 Helm v3.7.1 \ubc84\uc804\uc744 \ub0b4\ub824\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"\uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc2dc\uc5b4, \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc758 OS\uc640 CPU\uc5d0 \ub9de\ub294 \ubc14\uc774\ub108\ub9ac\uc758 \ub2e4\uc6b4 \uacbd\ub85c\ub97c \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"helm\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c 
\ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,r.kt)("p",{parentName:"li"},"Environment variables:"),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,r.kt)("p",{parentName:"li"},"..."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("h2",{id:"kustomize"},"Kustomize"),(0,r.kt)("p",null,"kustomize \ub610\ud55c \uc5ec\ub7ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kustomize v3.10.0 \ubc84\uc804\uc758 \ubc14\uc774\ub108\ub9ac\ub97c \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0"),"\uc5d0\uc11c \ud655\uc778 \ud6c4 \ub2e4\uc6b4\ub85c\ub4dc 
\ubc1b\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kustomize \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,r.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"CSI Plugin\uc740 kubernetes \ub0b4\uc758 \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \ub2f4\ub2f9\ud558\ub294 \ubaa8\ub4c8\uc785\ub2c8\ub2e4. 
\ub2e8\uc77c \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc27d\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 CSI Plugin\uc778 Local Path Provisioner\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub610\ud55c, \ub2e4\uc74c\uacfc \uac19\uc774 local-path-storage namespace \uc5d0 provisioner pod\uc774 Running \uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uc744 \uc218\ud589\ud558\uc5ec default storage class\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"default storage class\ub85c \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc774 NAME\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"local-path (default)")," \uc778 storage class\uac00 \uc874\uc7ac\ud558\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2949],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>d});var n=a(7294);function r(e,t,a){return t in 
e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},m=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=p(a),k=r,d=u["".concat(s,".").concat(k)]||u[k]||c[k]||l;return a?n.createElement(d,o(o({ref:t},m),{},{components:a})):n.createElement(d,o({ref:t},m))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:r,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/docs/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/docs/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. 
(Optional) Setup GPU",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],m={toc:p},u="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(u,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub4c8\uc744 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uacfc\uc815\uc5d0 \uad00\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \uc18c\uac1c\ub418\ub294 \uacfc\uc815\uc740 \ubaa8\ub450 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0\uc11c \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"helm"},"Helm"),(0,r.kt)("p",null,"Helm\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud328\ud0a4\uc9c0\uc640 \uad00\ub828\ub41c \uc790\uc6d0\uc744 \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 Helm v3.7.1 \ubc84\uc804\uc744 \ub0b4\ub824\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"\uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc2dc\uc5b4, \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc758 OS\uc640 CPU\uc5d0 \ub9de\ub294 \ubc14\uc774\ub108\ub9ac\uc758 \ub2e4\uc6b4 \uacbd\ub85c\ub97c \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"helm\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm search: search for 
charts")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,r.kt)("p",{parentName:"li"},"Environment variables:"),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,r.kt)("p",{parentName:"li"},"..."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("h2",{id:"kustomize"},"Kustomize"),(0,r.kt)("p",null,"kustomize \ub610\ud55c \uc5ec\ub7ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kustomize v3.10.0 \ubc84\uc804\uc758 \ubc14\uc774\ub108\ub9ac\ub97c \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0"),"\uc5d0\uc11c \ud655\uc778 \ud6c4 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kustomize \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,r.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"CSI Plugin\uc740 kubernetes \ub0b4\uc758 \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \ub2f4\ub2f9\ud558\ub294 \ubaa8\ub4c8\uc785\ub2c8\ub2e4. \ub2e8\uc77c \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc27d\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 CSI Plugin\uc778 Local Path Provisioner\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub610\ud55c, \ub2e4\uc74c\uacfc \uac19\uc774 local-path-storage namespace \uc5d0 provisioner pod\uc774 Running \uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uc744 \uc218\ud589\ud558\uc5ec default storage class\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"default storage class\ub85c \uc124\uc815\ub418\uc5c8\ub294\uc9c0 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc774 NAME\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"local-path (default)")," \uc778 storage class\uac00 \uc874\uc7ac\ud558\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/13e7227b.64afb4b7.js b/assets/js/13e7227b.e14a8c54.js similarity index 99% rename from assets/js/13e7227b.64afb4b7.js rename to assets/js/13e7227b.e14a8c54.js index 2410c403..8ed348b0 100644 --- a/assets/js/13e7227b.64afb4b7.js +++ b/assets/js/13e7227b.e14a8c54.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[434],{3905:(e,t,l)=>{l.d(t,{Zo:()=>m,kt:()=>k});var n=l(7294);function r(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function a(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(r[l]=e[l]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(r[l]=e[l])}return r}var p=n.createContext({}),i=function(e){var t=n.useContext(p),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},m=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var l=e.components,r=e.mdxType,a=e.originalType,p=e.parentName,m=s(e,["components","mdxType","originalType","parentName"]),c=i(l),f=r,k=c["".concat(p,".").concat(f)]||c[f]||u[f]||a;return l?n.createElement(k,o(o({ref:t},m),{},{components:l})):n.createElement(k,o({ref:t},m))}));function k(e,t){var l=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=l.length,o=new Array(a);o[0]=f;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:r,o[1]=s;for(var i=2;i{l.r(t),l.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>i});var n=l(7462),r=(l(7294),l(3905));const a={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"setup-components/install-components-mlflow",title:"2. 
MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/docs/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/docs/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/docs/setup-components/install-components-kf"},next:{title:"3. Seldon-Core",permalink:"/docs/setup-components/install-components-seldon"}},p={},i=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"PostgreSQL DB \uc124\uce58",id:"postgresql-db-\uc124\uce58",level:3},{value:"Minio \uc124\uc815",id:"minio-\uc124\uc815",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3}],m={toc:i},c="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,r.kt)("p",null,"MLflow\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4 ML \uc2e4\ud5d8 \uad00\ub9ac \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
MLflow\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"\uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4")," \uc678\uc5d0\ub3c4 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"ML Model \ud328\ud0a4\uc9d5"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"ML \ubaa8\ub378 \ubc30\ud3ec \uad00\ub9ac"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"ML \ubaa8\ub378 \uc800\uc7a5"),"\uacfc \uac19\uc740 \uae30\ub2a5\ub3c4 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 MLflow\ub97c \uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c MLflow\uc5d0\uc11c \uad00\ub9ac\ud558\ub294 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\uace0 UI\ub97c \uc81c\uacf5\ud558\ub294 MLflow Tracking Server\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"postgresql-db-\uc124\uce58"},"PostgreSQL DB \uc124\uce58"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Backend Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 PostgreSQL DB\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uba3c\uc800 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"\uc774\ub77c\ub294 namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,r.kt)("p",null,"postgresql DB\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace \uc5d0 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1\uac1c\uc758 postgresql \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c 
\uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,r.kt)("h3",{id:"minio-\uc124\uc815"},"Minio \uc124\uc815"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Artifacts Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 Minio\ub294 \uc774\uc804 Kubeflow \uc124\uce58 \ub2e8\uacc4\uc5d0\uc11c \uc124\uce58\ud55c Minio\ub97c \ud65c\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e8, kubeflow \uc6a9\ub3c4\uc640 mlflow \uc6a9\ub3c4\ub97c \ubd84\ub9ac\ud558\uae30 \uc704\ud574, mlflow \uc804\uc6a9 \ubc84\ud0b7(bucket)\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","minio \uc5d0 \uc811\uc18d\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574, \uc6b0\uc120 minio-service \ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"minio-install",src:l(1705).Z,width:"2906",height:"1946"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \ub85c\uadf8\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Username: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio")),(0,r.kt)("li",{parentName:"ul"},"Password: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,r.kt)("p",null,"\uc6b0\uce21 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},(0,r.kt)("inlineCode",{parentName:"strong"},"+"))," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Create Bucket"),"\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create-bucket",src:l(5683).Z,width:"2902",height:"1950"})),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Bucket Name"),"\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub97c \uc785\ub825\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc67c\ucabd\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub77c\ub294 \uc774\ub984\uc758 \ubc84\ud0b7\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-bucket",src:l(5036).Z,width:"2902",height:"1950"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository 
\uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"mlflow-server Helm Chart 0.2.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\uc8fc\uc758"),": \uc704\uc758 helm chart\ub294 MLflow \uc758 backend store \uc640 artifacts store \uc758 \uc811\uc18d \uc815\ubcf4\ub97c kubeflow \uc124\uce58 \uacfc\uc815\uc5d0\uc11c \uc0dd\uc131\ud55c minio\uc640 \uc704\uc758 ",(0,r.kt)("a",{parentName:"li",href:"#postgresql-db-%EC%84%A4%EC%B9%98"},"PostgreSQL DB \uc124\uce58"),"\uc5d0\uc11c \uc0dd\uc131\ud55c postgresql \uc815\ubcf4\ub97c default\ub85c \ud558\uc5ec \uc124\uce58\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ubcc4\uac1c\ub85c \uc0dd\uc131\ud55c DB \ud639\uc740 Object storage\ub97c \ud65c\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0, ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo"),"\ub97c \ucc38\uace0\ud558\uc5ec helm install \uc2dc value\ub97c \ub530\ub85c \uc124\uc815\ud558\uc5ec \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1 \uac1c\uc758 mlflow-server \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c MLflow Server\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 
\uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-install",src:l(5764).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},5683:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},1705:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},5036:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},5764:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[434],{3905:(e,t,l)=>{l.d(t,{Zo:()=>m,kt:()=>k});var n=l(7294);function r(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function a(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(r[l]=e[l]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(r[l]=e[l])}return r}var p=n.createContext({}),i=function(e){var t=n.useContext(p),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},m=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var l=e.components,r=e.mdxType,a=e.originalType,p=e.parentName,m=s(e,["components","mdxType","originalType","parentName"]),c=i(l),f=r,k=c["".concat(p,".").concat(f)]||c[f]||u[f]||a;return l?n.createElement(k,o(o({ref:t},m),{},{components:l})):n.createElement(k,o({ref:t},m))}));function k(e,t){var l=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=l.length,o=new Array(a);o[0]=f;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:r,o[1]=s;for(var i=2;i{l.r(t),l.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>i});var n=l(7462),r=(l(7294),l(3905));const a={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"setup-components/install-components-mlflow",title:"2. 
MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/docs/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/docs/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/docs/setup-components/install-components-kf"},next:{title:"3. Seldon-Core",permalink:"/docs/setup-components/install-components-seldon"}},p={},i=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"PostgreSQL DB \uc124\uce58",id:"postgresql-db-\uc124\uce58",level:3},{value:"Minio \uc124\uc815",id:"minio-\uc124\uc815",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3}],m={toc:i},c="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,r.kt)("p",null,"MLflow\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4 ML \uc2e4\ud5d8 \uad00\ub9ac \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
MLflow\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"\uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4")," \uc678\uc5d0\ub3c4 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"ML Model \ud328\ud0a4\uc9d5"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"ML \ubaa8\ub378 \ubc30\ud3ec \uad00\ub9ac"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"ML \ubaa8\ub378 \uc800\uc7a5"),"\uacfc \uac19\uc740 \uae30\ub2a5\ub3c4 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 MLflow\ub97c \uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c MLflow\uc5d0\uc11c \uad00\ub9ac\ud558\ub294 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\uace0 UI\ub97c \uc81c\uacf5\ud558\ub294 MLflow Tracking Server\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"postgresql-db-\uc124\uce58"},"PostgreSQL DB \uc124\uce58"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Backend Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 PostgreSQL DB\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uba3c\uc800 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"\uc774\ub77c\ub294 namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,r.kt)("p",null,"postgresql DB\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace \uc5d0 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1\uac1c\uc758 postgresql \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c 
\uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,r.kt)("h3",{id:"minio-\uc124\uc815"},"Minio \uc124\uc815"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Artifacts Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 Minio\ub294 \uc774\uc804 Kubeflow \uc124\uce58 \ub2e8\uacc4\uc5d0\uc11c \uc124\uce58\ud55c Minio\ub97c \ud65c\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e8, kubeflow \uc6a9\ub3c4\uc640 mlflow \uc6a9\ub3c4\ub97c \ubd84\ub9ac\ud558\uae30 \uc704\ud574, mlflow \uc804\uc6a9 \ubc84\ud0b7(bucket)\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","minio \uc5d0 \uc811\uc18d\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574, \uc6b0\uc120 minio-service \ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"minio-install",src:l(1705).Z,width:"2906",height:"1946"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \ub85c\uadf8\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Username: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio")),(0,r.kt)("li",{parentName:"ul"},"Password: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,r.kt)("p",null,"\uc6b0\uce21 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},(0,r.kt)("inlineCode",{parentName:"strong"},"+"))," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Create Bucket"),"\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create-bucket",src:l(5683).Z,width:"2902",height:"1950"})),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Bucket Name"),"\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub97c \uc785\ub825\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc67c\ucabd\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub77c\ub294 \uc774\ub984\uc758 \ubc84\ud0b7\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-bucket",src:l(5036).Z,width:"2902",height:"1950"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository 
\uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"mlflow-server Helm Chart 0.2.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\uc8fc\uc758"),": \uc704\uc758 helm chart\ub294 MLflow \uc758 backend store \uc640 artifacts store \uc758 \uc811\uc18d \uc815\ubcf4\ub97c kubeflow \uc124\uce58 \uacfc\uc815\uc5d0\uc11c \uc0dd\uc131\ud55c minio\uc640 \uc704\uc758 ",(0,r.kt)("a",{parentName:"li",href:"#postgresql-db-%EC%84%A4%EC%B9%98"},"PostgreSQL DB \uc124\uce58"),"\uc5d0\uc11c \uc0dd\uc131\ud55c postgresql \uc815\ubcf4\ub97c default\ub85c \ud558\uc5ec \uc124\uce58\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ubcc4\uac1c\ub85c \uc0dd\uc131\ud55c DB \ud639\uc740 Object storage\ub97c \ud65c\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0, ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo"),"\ub97c \ucc38\uace0\ud558\uc5ec helm install \uc2dc value\ub97c \ub530\ub85c \uc124\uc815\ud558\uc5ec \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1 \uac1c\uc758 mlflow-server \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c MLflow Server\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 
\uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-install",src:l(5764).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},5683:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},1705:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},5036:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},5764:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file diff --git a/assets/js/15cf2139.fd4e630f.js b/assets/js/15cf2139.18ea7c4d.js similarity index 99% rename from assets/js/15cf2139.fd4e630f.js rename to assets/js/15cf2139.18ea7c4d.js index da41c4f8..9b636252 100644 --- a/assets/js/15cf2139.fd4e630f.js +++ b/assets/js/15cf2139.18ea7c4d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9925],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||r;return t?a.createElement(b,s(s({ref:n},d),{},{components:t})):a.createElement(b,s({ref:n},d))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var r=t.length,s=new Array(r);s[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var a=t(7462),o=(t(7294),t(3905));const r={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"setup-components/install-components-seldon",title:"3. 
Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/docs/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/docs/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-seldon.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/docs/setup-components/install-components-mlflow"},next:{title:"4. Prometheus & Grafana",permalink:"/docs/setup-components/install-components-pg"}},i={},p=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Selon-Core \uc124\uce58",id:"selon-core-\uc124\uce58",level:2},{value:"Ambassador - Helm Repository \ucd94\uac00",id:"ambassador---helm-repository-\ucd94\uac00",level:3},{value:"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],d={toc:p},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub808\uc784\uc6cc\ud06c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 Seldon-Core \uc758 \uacf5\uc2dd ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"\uc81c\ud488 \uc124\uba85 \ud398\uc774\uc9c0")," \uc640 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"\uae43\ud5d9")," \uadf8\ub9ac\uace0 API Deployment \ud30c\ud2b8\ub97c \ucc38\uace0\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"selon-core-\uc124\uce58"},"Selon-Core \uc124\uce58"),(0,o.kt)("p",null,"Seldon-Core\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc778\uadf8\ub808\uc2a4(Ingress)\ub97c \ub2f4\ub2f9\ud558\ub294 Ambassador \uc640 Istio \uc640 \uac19\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"\ubaa8\ub4c8\uc774 \ud544\uc694\ud569\ub2c8\ub2e4"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core \uc5d0\uc11c\ub294 Ambassador \uc640 Istio \ub9cc\uc744 \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uba70, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Ambassador\ub97c \uc0ac\uc6a9\ud574 Seldon-core\ub97c \uc0ac\uc6a9\ud558\ubbc0\ub85c Ambassador\ub97c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"ambassador---helm-repository-\ucd94\uac00"},"Ambassador - Helm Repository 
\ucd94\uac00"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"ambassador Chart 6.9.3 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! 
You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"seldon-system \uc5d0 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"seldon-core-operator Chart 1.11.2 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"seldon-system namespace \uc5d0 1 \uac1c\uc758 seldon-controller-manager pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9925],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var 
t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||r;return t?a.createElement(b,s(s({ref:n},d),{},{components:t})):a.createElement(b,s({ref:n},d))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var r=t.length,s=new Array(r);s[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>m,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var a=t(7462),o=(t(7294),t(3905));const r={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"setup-components/install-components-seldon",title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/docs/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/docs/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-seldon.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/docs/setup-components/install-components-mlflow"},next:{title:"4. 
Prometheus & Grafana",permalink:"/docs/setup-components/install-components-pg"}},i={},p=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Selon-Core \uc124\uce58",id:"selon-core-\uc124\uce58",level:2},{value:"Ambassador - Helm Repository \ucd94\uac00",id:"ambassador---helm-repository-\ucd94\uac00",level:3},{value:"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],d={toc:p},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub808\uc784\uc6cc\ud06c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 Seldon-Core \uc758 \uacf5\uc2dd ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"\uc81c\ud488 \uc124\uba85 \ud398\uc774\uc9c0")," \uc640 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"\uae43\ud5d9")," \uadf8\ub9ac\uace0 API Deployment \ud30c\ud2b8\ub97c \ucc38\uace0\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"selon-core-\uc124\uce58"},"Selon-Core \uc124\uce58"),(0,o.kt)("p",null,"Seldon-Core\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc778\uadf8\ub808\uc2a4(Ingress)\ub97c \ub2f4\ub2f9\ud558\ub294 Ambassador \uc640 Istio \uc640 \uac19\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"\ubaa8\ub4c8\uc774 \ud544\uc694\ud569\ub2c8\ub2e4"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core \uc5d0\uc11c\ub294 Ambassador \uc640 Istio \ub9cc\uc744 \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uba70, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Ambassador\ub97c \uc0ac\uc6a9\ud574 Seldon-core\ub97c \uc0ac\uc6a9\ud558\ubbc0\ub85c Ambassador\ub97c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"ambassador---helm-repository-\ucd94\uac00"},"Ambassador - Helm Repository \ucd94\uac00"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"ambassador Chart 6.9.3 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"seldon-system \uc5d0 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"seldon-core-operator Chart 1.11.2 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 
\ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"seldon-system namespace \uc5d0 1 \uac1c\uc758 seldon-controller-manager pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/165f2d18.ff4ce9f2.js b/assets/js/165f2d18.abd35334.js similarity index 97% rename from assets/js/165f2d18.ff4ce9f2.js rename to assets/js/165f2d18.abd35334.js index f43f4f4a..ad5b4ebb 100644 --- a/assets/js/165f2d18.ff4ce9f2.js +++ b/assets/js/165f2d18.abd35334.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6397],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),d=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=d(e.components);return o.createElement(l.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=d(r),b=n,f=u["".concat(l,".").concat(b)]||u[b]||c[b]||a;return r?o.createElement(f,s(s({ref:t},p),{},{components:r})):o.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[u]="string"==typeof e?e:n,s[1]=i;for(var d=2;d{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. 
Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"version-1.0/kubeflow-dashboard-guide/tensorboards",title:"3. Tensorboards",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes"}},l={},d=[],p={toc:d},u="wrapper";function c(e){let{components:t,...a}=e;return(0,n.kt)(u,(0,o.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Tensorboards\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1277).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"Tensorboards \ud0ed\uc740 Tensorflow, PyTorch \ub4f1\uc758 \ud504\ub808\uc784\uc6cc\ud06c\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Tensorboard \uc720\ud2f8\uc774 \uc0dd\uc131\ud55c ML \ud559\uc2b5 \uad00\ub828 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84(Tensorboard Server)\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc0dd\uc131\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc774\ub807\uac8c \uc0dd\uc131\ud55c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\ub294, \uc77c\ubc18\uc801\uc778 \uc6d0\uaca9 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc758 \uc0ac\uc6a9\ubc95\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc73c\uba70, ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc5d0\uc11c \ubc14\ub85c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\ub294 \uc6a9\ub3c4"),"\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc758 \uacb0\uacfc\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ubc29\ubc95\uc5d0\ub294 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/"},"\ub2e4\uc591\ud55c \ubc29\uc2dd"),"\uc774 \uc788\uc73c\uba70, ",(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub354 \uc77c\ubc18\uc801\uc73c\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d Kubeflow \ucef4\ud3ec\ub10c\ud2b8\uc758 Visualization \uae30\ub2a5\uacfc MLflow\uc758 \uc2dc\uac01\ud654 \uae30\ub2a5\uc744 \ud65c\uc6a9\ud560 \uc608\uc815\uc774\ubbc0\ub85c, Tensorboards \ud398\uc774\uc9c0\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 
\uc0dd\ub7b5\ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0},6316:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1277:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6397],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),d=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=d(e.components);return o.createElement(l.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=d(r),b=n,f=u["".concat(l,".").concat(b)]||u[b]||c[b]||a;return r?o.createElement(f,s(s({ref:t},p),{},{components:r})):o.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[u]="string"==typeof e?e:n,s[1]=i;for(var d=2;d{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"version-1.0/kubeflow-dashboard-guide/tensorboards",title:"3. Tensorboards",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks"},next:{title:"4. 
Volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes"}},l={},d=[],p={toc:d},u="wrapper";function c(e){let{components:t,...a}=e;return(0,n.kt)(u,(0,o.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Tensorboards\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1277).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"Tensorboards \ud0ed\uc740 Tensorflow, PyTorch \ub4f1\uc758 \ud504\ub808\uc784\uc6cc\ud06c\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Tensorboard \uc720\ud2f8\uc774 \uc0dd\uc131\ud55c ML \ud559\uc2b5 \uad00\ub828 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84(Tensorboard Server)\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc0dd\uc131\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc774\ub807\uac8c \uc0dd\uc131\ud55c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\ub294, \uc77c\ubc18\uc801\uc778 \uc6d0\uaca9 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc758 \uc0ac\uc6a9\ubc95\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc73c\uba70, ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc5d0\uc11c \ubc14\ub85c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\ub294 \uc6a9\ub3c4"),"\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc758 \uacb0\uacfc\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ubc29\ubc95\uc5d0\ub294 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/"},"\ub2e4\uc591\ud55c \ubc29\uc2dd"),"\uc774 \uc788\uc73c\uba70, ",(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub354 \uc77c\ubc18\uc801\uc73c\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d Kubeflow \ucef4\ud3ec\ub10c\ud2b8\uc758 Visualization \uae30\ub2a5\uacfc MLflow\uc758 \uc2dc\uac01\ud654 \uae30\ub2a5\uc744 \ud65c\uc6a9\ud560 \uc608\uc815\uc774\ubbc0\ub85c, Tensorboards \ud398\uc774\uc9c0\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 \uc0dd\ub7b5\ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0},6316:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1277:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file diff --git a/assets/js/1671dd43.ea405406.js b/assets/js/1671dd43.89032e00.js similarity index 99% rename from assets/js/1671dd43.ea405406.js rename to assets/js/1671dd43.89032e00.js index 80f311e6..50272e44 100644 --- a/assets/js/1671dd43.ea405406.js +++ b/assets/js/1671dd43.89032e00.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6572],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>m});var i=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function s(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},p=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,u=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=o(t),b=r,m=d["".concat(u,".").concat(b)]||d[b]||k[b]||a;return t?i.createElement(m,s(s({ref:n},p),{},{components:t})):i.createElement(m,s({ref:n},p))}));function m(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,s=new Array(a);s[0]=b;var l={};for(var u in n)hasOwnProperty.call(n,u)&&(l[u]=n[u]);l.originalType=e,l[d]="string"==typeof e?e:r,s[1]=l;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>a,metadata:()=>l,toc:()=>o});var i=t(7462),r=(t(7294),t(3905));const a={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:o},d="wrapper";function k(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,i.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,r.kt)("p",null,"Minikube\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, v1.24.0 \ubc84\uc804\uc758 Minikube \ubc14\uc774\ub108\ub9ac\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,r.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\uc81c Minikube\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uad6c\ucd95\ud569\ub2c8\ub2e4.\nGPU \uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uacfc \ud074\ub7ec\uc2a4\ud130-\ud074\ub77c\uc774\uc5b8\ud2b8 \uac04 \ud1b5\uc2e0\uc744 \uac04\ud3b8\ud558\uac8c \uc218\ud589\ud558\uae30 \uc704\ud574, Minikube \ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc744 \ud65c\uc6a9\ud558\uc5ec \uc2e4\ud589\ud569\ub2c8\ub2e4. ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc740 root user \ub85c \uc2e4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"root user\ub85c \uc804\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"minikube start"),"\ub97c \uc218\ud589\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uad6c\ucd95\uc744 \uc9c4\ud589\ud569\ub2c8\ub2e4. 
Kubeflow\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud574, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc84\uc804\uc740 v1.21.7\ub85c \uc9c0\uc815\ud558\uc5ec \uad6c\ucd95\ud558\uba70 ",(0,r.kt)("inlineCode",{parentName:"p"},"--extra-config"),"\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,r.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,r.kt)("p",null,"Minikube\ub97c \uc124\uce58\ud558\uba74 Default\ub85c \uc124\uce58\ub418\ub294 addon\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4. \uc774 \uc911 \uc800\ud76c\uac00 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\uc744 addon\uc744 \ube44\ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,r.kt)("p",null,"\ubaa8\ub4e0 addon\uc774 \ube44\ud65c\uc131\ud654\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | 
disabled | kubernetes |\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,r.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud55c \ub3c4\uad6c\ub97c \uc124\uce58\ud569\ub2c8\ub2e4.\n",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0\uc5d0\ub294 root user\ub85c \ubaa8\ub4e0 \uc791\uc5c5\uc744 \uc9c4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, \uc6b0\uc120 kubernetes\uc758 \uad00\ub9ac\uc790 \uc778\uc99d \uc815\ubcf4\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub85c \uac00\uc838\uc635\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0\uc11c config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\nminikube kubectl -- config view --flatten\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uc815\ubcf4\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,r.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,r.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},".kube")," \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\nmkdir -p /home/$USER/.kube\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud574\ub2f9 \ud30c\uc77c\uc5d0 2. 
\uc5d0\uc11c \ucd9c\ub825\ub41c \uc815\ubcf4\ub97c \ubd99\uc5ec\ub123\uc740 \ub4a4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,r.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"helm"),(0,r.kt)("li",{parentName:"ul"},"kustomize"),(0,r.kt)("li",{parentName:"ul"},"CSI plugin"),(0,r.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,r.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6572],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>m});var i=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function s(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},p=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,u=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=o(t),b=r,m=d["".concat(u,".").concat(b)]||d[b]||k[b]||a;return t?i.createElement(m,s(s({ref:n},p),{},{components:t})):i.createElement(m,s({ref:n},p))}));function m(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,s=new Array(a);s[0]=b;var l={};for(var u in n)hasOwnProperty.call(n,u)&&(l[u]=n[u]);l.originalType=e,l[d]="string"==typeof e?e:r,s[1]=l;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>a,metadata:()=>l,toc:()=>o});var 
i=t(7462),r=(t(7294),t(3905));const a={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:o},d="wrapper";function k(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,i.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,r.kt)("p",null,"Minikube\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, v1.24.0 \ubc84\uc804\uc758 Minikube \ubc14\uc774\ub108\ub9ac\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,r.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\uc81c Minikube\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uad6c\ucd95\ud569\ub2c8\ub2e4.\nGPU \uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uacfc \ud074\ub7ec\uc2a4\ud130-\ud074\ub77c\uc774\uc5b8\ud2b8 \uac04 \ud1b5\uc2e0\uc744 \uac04\ud3b8\ud558\uac8c \uc218\ud589\ud558\uae30 \uc704\ud574, Minikube \ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc744 \ud65c\uc6a9\ud558\uc5ec \uc2e4\ud589\ud569\ub2c8\ub2e4. ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc740 root user \ub85c \uc2e4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"root user\ub85c \uc804\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"minikube start"),"\ub97c \uc218\ud589\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uad6c\ucd95\uc744 \uc9c4\ud589\ud569\ub2c8\ub2e4. 
Kubeflow\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud574, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc84\uc804\uc740 v1.21.7\ub85c \uc9c0\uc815\ud558\uc5ec \uad6c\ucd95\ud558\uba70 ",(0,r.kt)("inlineCode",{parentName:"p"},"--extra-config"),"\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,r.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,r.kt)("p",null,"Minikube\ub97c \uc124\uce58\ud558\uba74 Default\ub85c \uc124\uce58\ub418\ub294 addon\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4. \uc774 \uc911 \uc800\ud76c\uac00 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\uc744 addon\uc744 \ube44\ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,r.kt)("p",null,"\ubaa8\ub4e0 addon\uc774 \ube44\ud65c\uc131\ud654\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | 
disabled | kubernetes |\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,r.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud55c \ub3c4\uad6c\ub97c \uc124\uce58\ud569\ub2c8\ub2e4.\n",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0\uc5d0\ub294 root user\ub85c \ubaa8\ub4e0 \uc791\uc5c5\uc744 \uc9c4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, \uc6b0\uc120 kubernetes\uc758 \uad00\ub9ac\uc790 \uc778\uc99d \uc815\ubcf4\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub85c \uac00\uc838\uc635\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0\uc11c config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\nminikube kubectl -- config view --flatten\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uc815\ubcf4\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,r.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,r.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},".kube")," \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\nmkdir -p /home/$USER/.kube\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud574\ub2f9 \ud30c\uc77c\uc5d0 2. 
\uc5d0\uc11c \ucd9c\ub825\ub41c \uc815\ubcf4\ub97c \ubd99\uc5ec\ub123\uc740 \ub4a4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,r.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"helm"),(0,r.kt)("li",{parentName:"ul"},"kustomize"),(0,r.kt)("li",{parentName:"ul"},"CSI plugin"),(0,r.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,r.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/167d5ab1.411bad63.js b/assets/js/167d5ab1.7b4f8e38.js similarity index 99% rename from assets/js/167d5ab1.411bad63.js rename to assets/js/167d5ab1.7b4f8e38.js index 6bc14ba8..a89bdcdd 100644 --- a/assets/js/167d5ab1.411bad63.js +++ b/assets/js/167d5ab1.7b4f8e38.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4177],{3905:(n,e,t)=>{t.d(e,{Zo:()=>m,kt:()=>c});var a=t(7294);function r(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function p(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(r[t]=n[t]);return r}(n,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(r[t]=n[t])}return r}var o=a.createContext({}),s=function(n){var e=a.useContext(o),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},m=function(n){var e=s(n.components);return a.createElement(o.Provider,{value:e},n.children)},d="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,r=n.mdxType,p=n.originalType,o=n.parentName,m=l(n,["components","mdxType","originalType","parentName"]),d=s(t),u=r,c=d["".concat(o,".").concat(u)]||d[u]||_[u]||p;return t?a.createElement(c,i(i({ref:e},m),{},{components:t})):a.createElement(c,i({ref:e},m))}));function c(n,e){var t=arguments,r=e&&e.mdxType;if("string"==typeof 
n||r){var p=t.length,i=new Array(p);i[0]=u;var l={};for(var o in e)hasOwnProperty.call(e,o)&&(l[o]=e[o]);l.originalType=n,l[d]="string"==typeof n?n:r,i[1]=l;for(var s=2;s{t.r(e),t.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>p,metadata:()=>l,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const p={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/advanced-environment",id:"version-1.0/kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/docs/1.0/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-environment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/1.0/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/docs/1.0/kubeflow/advanced-pipeline"}},o={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95",id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],m={toc:s},d="wrapper";function _(n){let{components:e,...t}=n;return(0,r.kt)(d,(0,a.Z)({},m,t,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),"\uc5d0\uc11c \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uba74 \uc2e4\ud328\ud558\uac8c \ub429\ub2c8\ub2e4. 
\uc65c \uc2e4\ud328\ud558\ub294\uc9c0 \uc54c\uc544\ubcf4\uace0 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uc218 \uc788\ub3c4\ub85d \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component#convert-to-kubeflow-format"},"\uc55e\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8"),"\ub97c yaml\ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc744 \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc5d0 \ub530\ub974\uba74 \uc774 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 
\uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \uc704\uc5d0\uc11c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc624\ub958\uac00 \ubc1c\uc0dd\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ub418\ub294 \ubc29\uc2dd\uc5d0 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \uac01\uac01 \ub3c5\ub9bd\ub41c \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc790\uc138\ud788 \ubcf4\uba74 \uc0dd\uc131\ub41c \ub9cc\ub4e0 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \uc5d0\uc11c \uc815\ud574\uc9c4 \uc774\ubbf8\uc9c0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc5b4\ub5a4 \uc774\uc720 \ub54c\ubb38\uc5d0 \uc2e4\ud589\uc774 \uc548 \ub418\ub294\uc9c0 \ub208\uce58\ucc44\uc2e0 \ubd84\ub4e4\ub3c4 \uc788\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," \uc774\ubbf8\uc9c0\uc5d0\ub294 \uc6b0\ub9ac\uac00 \uc0ac\uc6a9\ud558\uace0\uc790 \ud558\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn")," \uc774 \uc124\uce58\ub418\uc5b4 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7ec\ubbc0\ub85c \uc2e4\ud589\ud560 \ub54c \ud574\ub2f9 \ud328\ud0a4\uc9c0\uac00 \uc874\uc7ac\ud558\uc9c0 \uc54a\ub294\ub2e4\ub294 \uc5d0\ub7ec\uc640 \ud568\uaed8 \uc2e4\ud589\uc774 \uc548 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub7fc \uc5b4\ub5bb\uac8c \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc744\uae4c\uc694?"),(0,r.kt)("h2",{id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95"},"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95"),(0,r.kt)("p",null,"Kubeflow\ub97c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub450 \uac00\uc9c0 \ubc29\ubc95\uc744 \ud1b5\ud574 \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image")," \uc0ac\uc6a9"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install")," \uc0ac\uc6a9")),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud560 \ub54c \uc0ac\uc6a9\ud588\ub358 \ud568\uc218 ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," \uac00 \uc5b4\ub5a4 argument\ub4e4\uc744 \ubc1b\uc744 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": 
\ucef4\ud3ec\ub10c\ud2b8\ub85c \ub9cc\ub4e4 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c \ud568\uc218"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ud560 \uc774\ubbf8\uc9c0"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc11c \ucd94\uac00\ub85c \uc124\uce58\ud574\uc57c \ud558\ub294 \ud328\ud0a4\uc9c0")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc0ac\uc6a9\ud558\ub294 base_image\uc5d0 \ud328\ud0a4\uc9c0\ub4e4\uc774 \uc804\ubd80 \uc124\uce58\ub418\uc5b4 \uc788\ub2e4\uba74 \ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0 \uc124\uce58 \uc5c6\uc774 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 Dockerfile\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"\uc704\uc758 Dockerfile\uc744 \uc774\uc6a9\ud574 \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc2e4\uc2b5\uc5d0\uc11c \uc0ac\uc6a9\ud574\ubcfc \ub3c4\ucee4 \ud5c8\ube0c\ub294 ghcr\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uac01\uc790 \ud658\uacbd\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ub3c4\ucee4 \ud5c8\ube0c\ub97c \uc120\ud0dd \ud6c4 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"\uc774\uc81c base_image\ub97c \uc785\ub825\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"base_image\uac00 \uc6b0\ub9ac\uac00 \uc124\uc815\ud55c \uac12\uc73c\ub85c \ubc14\ub010 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \ud328\ud0a4\uc9c0\uac00 \ucd94\uac00\ub420 \ub54c\ub9c8\ub2e4 docker \uc774\ubbf8\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \uc0c8\ub85c \uc0dd\uc131\ud558\ub294 \uc791\uc5c5\uc740 \ub9ce\uc740 \uc2dc\uac04\uc774 \uc18c\uc694\ub429\ub2c8\ub2e4.\n\uc774 \ub54c, ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument \ub97c \uc0ac\uc6a9\ud558\uba74 \ud328\ud0a4\uc9c0\ub97c \ucee8\ud14c\uc774\ub108\uc5d0 \uc27d\uac8c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n 
_parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"\uc704\uc5d0 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\uc0dd\uc131\ub41c yaml \ud30c\uc77c\uc744 \uc790\uc138\ud788 \ubcf4\uba74, \ub2e4\uc74c\uacfc \uac19\uc740 \uc904\uc774 \uc790\ub3d9\uc73c\ub85c \ucd94\uac00\ub418\uc5b4 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uae30 \ub54c\ubb38\uc5d0 \uc624\ub958 \uc5c6\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}_.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4177],{3905:(n,e,t)=>{t.d(e,{Zo:()=>m,kt:()=>c});var a=t(7294);function r(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function p(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(r[t]=n[t]);return r}(n,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(r[t]=n[t])}return r}var o=a.createContext({}),s=function(n){var e=a.useContext(o),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},m=function(n){var e=s(n.components);return a.createElement(o.Provider,{value:e},n.children)},d="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,r=n.mdxType,p=n.originalType,o=n.parentName,m=l(n,["components","mdxType","originalType","parentName"]),d=s(t),u=r,c=d["".concat(o,".").concat(u)]||d[u]||_[u]||p;return t?a.createElement(c,i(i({ref:e},m),{},{components:t})):a.createElement(c,i({ref:e},m))}));function c(n,e){var t=arguments,r=e&&e.mdxType;if("string"==typeof n||r){var p=t.length,i=new Array(p);i[0]=u;var l={};for(var o in e)hasOwnProperty.call(e,o)&&(l[o]=e[o]);l.originalType=n,l[d]="string"==typeof n?n:r,i[1]=l;for(var s=2;s{t.r(e),t.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>p,metadata:()=>l,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const p={title:"9. 
Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/advanced-environment",id:"version-1.0/kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/docs/1.0/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-environment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/1.0/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/docs/1.0/kubeflow/advanced-pipeline"}},o={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95",id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],m={toc:s},d="wrapper";function _(n){let{components:e,...t}=n;return(0,r.kt)(d,(0,a.Z)({},m,t,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),"\uc5d0\uc11c \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uba74 \uc2e4\ud328\ud558\uac8c \ub429\ub2c8\ub2e4. 
\uc65c \uc2e4\ud328\ud558\ub294\uc9c0 \uc54c\uc544\ubcf4\uace0 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uc218 \uc788\ub3c4\ub85d \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component#convert-to-kubeflow-format"},"\uc55e\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8"),"\ub97c yaml\ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc744 \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc5d0 \ub530\ub974\uba74 \uc774 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 
\uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \uc704\uc5d0\uc11c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc624\ub958\uac00 \ubc1c\uc0dd\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ub418\ub294 \ubc29\uc2dd\uc5d0 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \uac01\uac01 \ub3c5\ub9bd\ub41c \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc790\uc138\ud788 \ubcf4\uba74 \uc0dd\uc131\ub41c \ub9cc\ub4e0 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \uc5d0\uc11c \uc815\ud574\uc9c4 \uc774\ubbf8\uc9c0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc5b4\ub5a4 \uc774\uc720 \ub54c\ubb38\uc5d0 \uc2e4\ud589\uc774 \uc548 \ub418\ub294\uc9c0 \ub208\uce58\ucc44\uc2e0 \ubd84\ub4e4\ub3c4 \uc788\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," \uc774\ubbf8\uc9c0\uc5d0\ub294 \uc6b0\ub9ac\uac00 \uc0ac\uc6a9\ud558\uace0\uc790 \ud558\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn")," \uc774 \uc124\uce58\ub418\uc5b4 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7ec\ubbc0\ub85c \uc2e4\ud589\ud560 \ub54c \ud574\ub2f9 \ud328\ud0a4\uc9c0\uac00 \uc874\uc7ac\ud558\uc9c0 \uc54a\ub294\ub2e4\ub294 \uc5d0\ub7ec\uc640 \ud568\uaed8 \uc2e4\ud589\uc774 \uc548 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub7fc \uc5b4\ub5bb\uac8c \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc744\uae4c\uc694?"),(0,r.kt)("h2",{id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95"},"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95"),(0,r.kt)("p",null,"Kubeflow\ub97c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub450 \uac00\uc9c0 \ubc29\ubc95\uc744 \ud1b5\ud574 \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image")," \uc0ac\uc6a9"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install")," \uc0ac\uc6a9")),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud560 \ub54c \uc0ac\uc6a9\ud588\ub358 \ud568\uc218 ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," \uac00 \uc5b4\ub5a4 argument\ub4e4\uc744 \ubc1b\uc744 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": 
\ucef4\ud3ec\ub10c\ud2b8\ub85c \ub9cc\ub4e4 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c \ud568\uc218"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ud560 \uc774\ubbf8\uc9c0"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc11c \ucd94\uac00\ub85c \uc124\uce58\ud574\uc57c \ud558\ub294 \ud328\ud0a4\uc9c0")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc0ac\uc6a9\ud558\ub294 base_image\uc5d0 \ud328\ud0a4\uc9c0\ub4e4\uc774 \uc804\ubd80 \uc124\uce58\ub418\uc5b4 \uc788\ub2e4\uba74 \ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0 \uc124\uce58 \uc5c6\uc774 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 Dockerfile\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"\uc704\uc758 Dockerfile\uc744 \uc774\uc6a9\ud574 \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc2e4\uc2b5\uc5d0\uc11c \uc0ac\uc6a9\ud574\ubcfc \ub3c4\ucee4 \ud5c8\ube0c\ub294 ghcr\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uac01\uc790 \ud658\uacbd\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ub3c4\ucee4 \ud5c8\ube0c\ub97c \uc120\ud0dd \ud6c4 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"\uc774\uc81c base_image\ub97c \uc785\ub825\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"base_image\uac00 \uc6b0\ub9ac\uac00 \uc124\uc815\ud55c \uac12\uc73c\ub85c \ubc14\ub010 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \ud328\ud0a4\uc9c0\uac00 \ucd94\uac00\ub420 \ub54c\ub9c8\ub2e4 docker \uc774\ubbf8\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \uc0c8\ub85c \uc0dd\uc131\ud558\ub294 \uc791\uc5c5\uc740 \ub9ce\uc740 \uc2dc\uac04\uc774 \uc18c\uc694\ub429\ub2c8\ub2e4.\n\uc774 \ub54c, ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument \ub97c \uc0ac\uc6a9\ud558\uba74 \ud328\ud0a4\uc9c0\ub97c \ucee8\ud14c\uc774\ub108\uc5d0 \uc27d\uac8c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n 
_parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"\uc704\uc5d0 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\uc0dd\uc131\ub41c yaml \ud30c\uc77c\uc744 \uc790\uc138\ud788 \ubcf4\uba74, \ub2e4\uc74c\uacfc \uac19\uc740 \uc904\uc774 \uc790\ub3d9\uc73c\ub85c \ucd94\uac00\ub418\uc5b4 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uae30 \ub54c\ubb38\uc5d0 \uc624\ub958 \uc5c6\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}_.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/1df93b7f.c4fb40a2.js b/assets/js/1df93b7f.21501263.js similarity index 95% rename from en/assets/js/1df93b7f.c4fb40a2.js rename to assets/js/1df93b7f.21501263.js index 471c3225..365a2e48 100644 --- a/en/assets/js/1df93b7f.c4fb40a2.js +++ b/assets/js/1df93b7f.21501263.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3237],{9722:(e,t,a)=>{a.d(t,{Z:()=>c});var l,r=a(7294);function n(){return n=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...c}=e;return r.createElement("svg",n({xmlns:"http://www.w3.org/2000/svg",width:1088,height:687.962,viewBox:"0 0 1088 687.962","aria-labelledby":a},c),void 0===t?r.createElement("title",{id:a},"Easy to Use"):t?r.createElement("title",{id:a},t):null,l||(l=r.createElement("g",{"data-name":"Group 12"},r.createElement("g",{"data-name":"Group 11"},r.createElement("path",{"data-name":"Path 83",d:"M961.81 454.442c-5.27 45.15-16.22 81.4-31.25 110.31-20 38.52-54.21 54.04-84.77 70.28a193.275 193.275 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.282 657.282 0 0 0-104.09-13.16q-14.97-.675-29.97-.67c-15.42.02-293.07 5.29-360.67-131.57-16.69-33.76-28.13-75-32.24-125.27-11.63-142.12 52.29-235.46 134.74-296.47 155.97-115.41 369.76-110.57 523.43 7.88 102.36 78.9 198.2 198.31 179.02 362.74Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 84",d:"M930.56 564.752c-20 38.52-47.21 64.04-77.77 80.28a193.272 193.272 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.3 657.3 0 0 0-104.09-13.16q-14.97-.675-29.97-.67-23.13.03-46.25 1.72c-100.17 7.36-253.82-6.43-321.42-143.29L326 177.962l62.95 161.619 20.09 51.59 55.37-75.98L493 275.962l130.2 149.27 36.8-81.27 254.78 207.919 14.21 11.59Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 85",d:"m302 282.962 26-57 36 
83-31-60Z",opacity:.1}),r.createElement("path",{"data-name":"Path 86",d:"M554.5 647.802q-14.97-.675-29.97-.67l-115.49-255.96Z",opacity:.1}),r.createElement("path",{"data-name":"Path 87",d:"M464.411 315.191 493 292.962l130 150-132-128Z",opacity:.1}),r.createElement("path",{"data-name":"Path 88",d:"M852.79 645.032a193.265 193.265 0 0 1-27.46 11.94L623.2 425.232Z",opacity:.1}),r.createElement("circle",{"data-name":"Ellipse 11",cx:3,cy:3,r:3,transform:"translate(479 98.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 12",cx:3,cy:3,r:3,transform:"translate(396 201.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 13",cx:2,cy:2,r:2,transform:"translate(600 220.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 14",cx:2,cy:2,r:2,transform:"translate(180 265.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 15",cx:2,cy:2,r:2,transform:"translate(612 96.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 16",cx:2,cy:2,r:2,transform:"translate(736 192.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 17",cx:2,cy:2,r:2,transform:"translate(858 344.962)",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 89",d:"M306 121.222h-2.76v-2.76h-1.48v2.76H299v1.478h2.76v2.759h1.48V122.7H306Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 90",d:"M848 424.222h-2.76v-2.76h-1.48v2.76H841v1.478h2.76v2.759h1.48V425.7H848Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 91",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 92",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",opacity:.1}),r.createElement("ellipse",{"data-name":"Ellipse 18",cx:544,cy:30,rx:544,ry:30,transform:"translate(0 583.962)",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 93",d:"M568 571.962c0 33.137-14.775 24-33 24s-33 9.137-33-24 33-96 33-96 33 62.863 33 96Z",fill:"#ff6584"}),r.createElement("path",{"data-name":"Path 94",d:"M550 584.641c0 15.062-6.716 10.909-15 10.909s-15 4.153-15-10.909 15-43.636 15-43.636 15 28.576 15 43.636Z",opacity:.1}),r.createElement("rect",{"data-name":"Rectangle 97",width:92,height:18,rx:9,transform:"translate(489 604.962)",fill:"#2f2e41"}),r.createElement("rect",{"data-name":"Rectangle 98",width:92,height:18,rx:9,transform:"translate(489 586.962)",fill:"#2f2e41"}),r.createElement("path",{"data-name":"Path 95",d:"M137 490.528c0 55.343 34.719 100.126 77.626 100.126",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 96",d:"M214.626 590.654c0-55.965 38.745-101.251 86.626-101.251",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 97",d:"M165.125 495.545c0 52.57 22.14 95.109 49.5 95.109",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 98",d:"M214.626 590.654c0-71.511 44.783-129.377 100.126-129.377",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 99",d:"M198.3 591.36s11.009-.339 14.326-2.7 16.934-5.183 17.757-1.395 16.544 18.844 4.115 18.945-28.879-1.936-32.19-3.953-4.008-10.897-4.008-10.897Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 100",d:"M234.716 604.89c-12.429.1-28.879-1.936-32.19-3.953-2.522-1.536-3.527-7.048-3.863-9.591l-.368.014s.7 8.879 4.009 10.9 19.761 4.053 32.19 3.953c3.588-.029 4.827-1.305 4.759-3.2-.498 1.142-1.867 1.855-4.537 1.877Z",opacity:.2}),r.createElement("path",{"data-name":"Path 101",d:"M721.429 527.062c0 38.029 23.857 68.8 
53.341 68.8",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 102",d:"M774.769 595.863c0-38.456 26.623-69.575 59.525-69.575",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 103",d:"M740.755 530.509c0 36.124 15.213 65.354 34.014 65.354",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 104",d:"M774.769 595.863c0-49.139 30.773-88.9 68.8-88.9",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 105",d:"M763.548 596.348s7.565-.233 9.844-1.856 11.636-3.562 12.2-.958 11.368 12.949 2.828 13.018-19.844-1.33-22.119-2.716-2.753-7.488-2.753-7.488Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 106",d:"M788.574 605.645c-8.54.069-19.844-1.33-22.119-2.716-1.733-1.056-2.423-4.843-2.654-6.59l-.253.01s.479 6.1 2.755 7.487 13.579 2.785 22.119 2.716c2.465-.02 3.317-.9 3.27-2.2-.343.788-1.283 1.278-3.118 1.293Z",opacity:.2}),r.createElement("path",{"data-name":"Path 107",d:"M893.813 618.699s11.36-1.729 14.5-4.591 16.89-7.488 18.217-3.667 19.494 17.447 6.633 19.107-30.153 1.609-33.835-.065-5.515-10.784-5.515-10.784Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 108",d:"M933.228 628.154c-12.86 1.659-30.153 1.609-33.835-.065-2.8-1.275-4.535-6.858-5.2-9.45l-.379.061s1.833 9.109 5.516 10.783 20.975 1.725 33.835.065c3.712-.479 4.836-1.956 4.529-3.906-.375 1.246-1.703 2.156-4.466 2.512Z",opacity:.2}),r.createElement("path",{"data-name":"Path 109",d:"M614.26 617.881s9.587-1.459 12.237-3.875 14.255-6.32 15.374-3.095 16.452 14.725 5.6 16.125-25.448 1.358-28.555-.055-4.656-9.1-4.656-9.1Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 110",d:"M647.524 625.856c-10.853 1.4-25.448 1.358-28.555-.055-2.367-1.076-3.827-5.788-4.39-7.976l-.32.051s1.547 7.687 4.655 9.1 17.7 1.456 28.555.055c3.133-.4 4.081-1.651 3.822-3.3-.314 1.057-1.435 1.825-3.767 2.125Z",opacity:.2}),r.createElement("path",{"data-name":"Path 111",d:"M122.389 613.09s7.463-1.136 9.527-3.016 11.1-4.92 11.969-2.409 12.808 11.463 4.358 12.553-19.811 1.057-22.23-.043-3.624-7.085-3.624-7.085Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 112",d:"M148.285 619.302c-8.449 1.09-19.811 1.057-22.23-.043-1.842-.838-2.979-4.506-3.417-6.209l-.249.04s1.2 5.984 3.624 7.085 13.781 1.133 22.23.043c2.439-.315 3.177-1.285 2.976-2.566-.246.818-1.119 1.416-2.934 1.65Z",opacity:.2}),r.createElement("path",{"data-name":"Path 113",d:"M383.7 601.318c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.118-36.793 93.694-36.793 93.08 6.573 93.08 36.793Z",opacity:.1}),r.createElement("path",{"data-name":"Path 114",d:"M383.7 593.881c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.114-36.8 93.69-36.8 93.084 6.576 93.084 36.8Z",fill:"#3f3d56"})),r.createElement("path",{"data-name":"Path 40",d:"M360.175 475.732h91.791v37.153h-91.791Z",fill:"#fff",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 41",d:"M277.126 597.026a21.828 21.828 0 0 1-18.908-10.927 21.829 21.829 0 0 0 18.908 32.782h21.855v-21.855Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 42",d:"m375.451 481.607 76.514-4.782v-10.928a21.854 21.854 0 0 0-21.855-21.855h-98.347l-2.732-4.735a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.732-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.731-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.735h-.071l-4.526-4.525a3.153 3.153 0 0 0-5.276 1.414l-1.5 5.577-5.674-1.521a3.154 3.154 0 0 0-3.863 3.864l1.52 5.679-5.575 1.494a3.155 3.155 0 0 0-1.416 5.278l4.526 4.526v.07l-4.735 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 
5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.727a3.154 3.154 0 0 0 0 5.464l4.735 2.736-4.735 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.735a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.728a3.154 3.154 0 0 0 0 5.464l4.732 2.732a21.854 21.854 0 0 0 21.858 21.855h131.13a21.854 21.854 0 0 0 21.855-21.855v-87.42l-76.514-4.782a11.632 11.632 0 0 1 0-23.219",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 43",d:"M408.255 618.882h32.782v-43.71h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 44",d:"M462.893 591.563a5.438 5.438 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 45",d:"M419.183 553.317h32.782v-21.855h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 46",d:"M462.893 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.814 2.814 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 47",d:"M320.836 479.556a2.732 2.732 0 0 1-2.732-2.732 8.2 8.2 0 0 0-16.391 0 2.732 2.732 0 0 1-5.464 0 13.66 13.66 0 0 1 27.319 0 2.732 2.732 0 0 1-2.732 2.732",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 48",d:"M364.546 618.881h65.565a21.854 21.854 0 0 0 21.855-21.855v-76.492h-65.565a21.854 21.854 0 0 0-21.855 21.855Z",fill:"#ffff50",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 49",d:"M435.596 554.41h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0-54.434h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.652h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m16.369-100.959c-.013 0-.024-.007-.037-.005-3.377.115-4.974 3.492-6.384 6.472-1.471 3.114-2.608 5.139-4.473 5.078-2.064-.074-3.244-2.406-4.494-4.874-1.436-2.835-3.075-6.049-6.516-5.929-3.329.114-4.932 3.053-6.346 5.646-1.5 2.762-2.529 4.442-4.5 4.364-2.106-.076-3.225-1.972-4.52-4.167-1.444-2.443-3.112-5.191-6.487-5.1-3.272.113-4.879 2.606-6.3 4.808-1.5 2.328-2.552 3.746-4.551 3.662-2.156-.076-3.27-1.65-4.558-3.472-1.447-2.047-3.077-4.363-6.442-4.251-3.2.109-4.807 2.153-6.224 3.954-1.346 1.709-2.4 3.062-4.621 2.977a1.094 1.094 0 0 0-.079 2.186c3.3.11 4.967-1.967 6.417-3.81 1.286-1.635 2.4-3.045 4.582-3.12 2.1-.09 3.091 1.218 4.584 3.327 1.417 2 3.026 4.277 6.263 4.394 3.391.114 5.022-2.42 6.467-4.663 1.292-2 2.406-3.734 4.535-3.807 1.959-.073 3.026 
1.475 4.529 4.022 1.417 2.4 3.023 5.121 6.324 5.241 3.415.118 5.064-2.863 6.5-5.5 1.245-2.282 2.419-4.437 4.5-4.509 1.959-.046 2.981 1.743 4.492 4.732 1.412 2.79 3.013 5.95 6.365 6.071h.185c3.348 0 4.937-3.36 6.343-6.331 1.245-2.634 2.423-5.114 4.444-5.216Z",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 50",d:"M342.691 618.882h43.71v-43.71h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 8",transform:"rotate(-14.98 2188.845 -1120.376)"},r.createElement("rect",{"data-name":"Rectangle 3",width:92.361,height:36.462,rx:2,fill:"#d8d8d8"}),r.createElement("g",{"data-name":"Group 2",transform:"translate(1.531 23.03)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 4",width:5.336,height:5.336,rx:1,transform:"translate(16.797)"}),r.createElement("rect",{"data-name":"Rectangle 5",width:5.336,height:5.336,rx:1,transform:"translate(23.12)"}),r.createElement("rect",{"data-name":"Rectangle 6",width:5.336,height:5.336,rx:1,transform:"translate(29.444)"}),r.createElement("rect",{"data-name":"Rectangle 7",width:5.336,height:5.336,rx:1,transform:"translate(35.768)"}),r.createElement("rect",{"data-name":"Rectangle 8",width:5.336,height:5.336,rx:1,transform:"translate(42.091)"}),r.createElement("rect",{"data-name":"Rectangle 9",width:5.336,height:5.336,rx:1,transform:"translate(48.415)"}),r.createElement("rect",{"data-name":"Rectangle 10",width:5.336,height:5.336,rx:1,transform:"translate(54.739)"}),r.createElement("rect",{"data-name":"Rectangle 11",width:5.336,height:5.336,rx:1,transform:"translate(61.063)"}),r.createElement("rect",{"data-name":"Rectangle 12",width:5.336,height:5.336,rx:1,transform:"translate(67.386)"}),r.createElement("path",{"data-name":"Path 51",d:"M1.093 0h13.425a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0ZM75 0h13.426a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H75a1.093 1.093 0 0 1-1.093-1.093v-3.15A1.093 1.093 0 0 1 75 0Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 3",transform:"translate(1.531 10.261)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 52",d:"M1.093 0h5.125A1.093 1.093 0 0 1 7.31 1.093v3.149a1.093 1.093 0 0 1-1.092 1.093H1.093A1.093 1.093 0 0 1 0 4.242V1.093A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 13",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 14",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 15",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 16",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 17",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 18",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 19",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 20",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 21",width:5.336,height:5.336,rx:1,transform:"translate(58.888)"}),r.createElement("rect",{"data-name":"Rectangle 22",width:5.336,height:5.336,rx:1,transform:"translate(65.212)"}),r.createElement("rect",{"data-name":"Rectangle 
23",width:5.336,height:5.336,rx:1,transform:"translate(71.536)"}),r.createElement("rect",{"data-name":"Rectangle 24",width:5.336,height:5.336,rx:1,transform:"translate(77.859)"}),r.createElement("rect",{"data-name":"Rectangle 25",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 4",transform:"rotate(180 45.525 4.773)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 53",d:"M1.093 0h5.126a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 26",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 27",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 28",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 29",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 30",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 31",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 32",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 33",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 34",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 35",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 36",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 37",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 38",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("rect",{"data-name":"Rectangle 39",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 40",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 41",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 42",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 43",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 44",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 45",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 46",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 47",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 48",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 49",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 50",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 
51",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 6",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 54",d:"M2.624 16.584h7.3a1.093 1.093 0 0 1 1.092 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093h-7.3a1.093 1.093 0 0 1-1.092-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 5",transform:"translate(12.202 16.584)"},r.createElement("rect",{"data-name":"Rectangle 52",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 53",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 54",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 55",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("rect",{"data-name":"Rectangle 56",width:5.336,height:5.336,rx:1,transform:"translate(25.295)"}),r.createElement("rect",{"data-name":"Rectangle 57",width:5.336,height:5.336,rx:1,transform:"translate(31.619)"}),r.createElement("rect",{"data-name":"Rectangle 58",width:5.336,height:5.336,rx:1,transform:"translate(37.942)"}),r.createElement("rect",{"data-name":"Rectangle 59",width:5.336,height:5.336,rx:1,transform:"translate(44.265)"}),r.createElement("rect",{"data-name":"Rectangle 60",width:5.336,height:5.336,rx:1,transform:"translate(50.589)"}),r.createElement("rect",{"data-name":"Rectangle 61",width:5.336,height:5.336,rx:1,transform:"translate(56.912)"}),r.createElement("rect",{"data-name":"Rectangle 62",width:5.336,height:5.336,rx:1,transform:"translate(63.236)"})),r.createElement("path",{"data-name":"Path 55",d:"M83.053 16.584h6.906a1.093 1.093 0 0 1 1.091 1.093v3.15a1.093 1.093 0 0 1-1.091 1.093h-6.907a1.093 1.093 0 0 1-1.093-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 7",transform:"translate(1.531 29.627)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 63",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 64",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 65",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 66",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("path",{"data-name":"Path 56",d:"M26.387 0h30.422a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093H26.387a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 26.387 0Zm33.594 0h3.942a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093h-3.942a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 59.981 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 67",width:5.336,height:5.336,rx:1,transform:"translate(66.003)"}),r.createElement("rect",{"data-name":"Rectangle 68",width:5.336,height:5.336,rx:1,transform:"translate(72.327)"}),r.createElement("rect",{"data-name":"Rectangle 69",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("path",{"data-name":"Path 57",d:"M78.254 2.273v-1.18A1.093 1.093 0 0 1 79.347 0h3.15a1.093 1.093 0 0 1 1.093 1.093v1.18Z"}),r.createElement("path",{"data-name":"Path 58",d:"M83.591 3.063v1.18a1.093 1.093 0 0 1-1.093 1.093h-3.15a1.093 1.093 0 0 1-1.093-1.093v-1.18Z"})),r.createElement("rect",{"data-name":"Rectangle 70",width:88.927,height:2.371,rx:1.085,transform:"translate(1.925 
1.17)",fill:"#4a4a4a"}),r.createElement("rect",{"data-name":"Rectangle 71",width:4.986,height:1.581,rx:.723,transform:"translate(4.1 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 72",width:4.986,height:1.581,rx:.723,transform:"translate(10.923 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 73",width:4.986,height:1.581,rx:.723,transform:"translate(16.173 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 74",width:4.986,height:1.581,rx:.723,transform:"translate(21.421 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 75",width:4.986,height:1.581,rx:.723,transform:"translate(26.671 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 76",width:4.986,height:1.581,rx:.723,transform:"translate(33.232 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 77",width:4.986,height:1.581,rx:.723,transform:"translate(38.48 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 78",width:4.986,height:1.581,rx:.723,transform:"translate(43.73 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 79",width:4.986,height:1.581,rx:.723,transform:"translate(48.978 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 80",width:4.986,height:1.581,rx:.723,transform:"translate(55.54 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 81",width:4.986,height:1.581,rx:.723,transform:"translate(60.788 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 82",width:4.986,height:1.581,rx:.723,transform:"translate(66.038 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 83",width:4.986,height:1.581,rx:.723,transform:"translate(72.599 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 84",width:4.986,height:1.581,rx:.723,transform:"translate(77.847 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 85",width:4.986,height:1.581,rx:.723,transform:"translate(83.097 1.566)",fill:"#d8d8d8",opacity:.136})),r.createElement("path",{"data-name":"Path 59",d:"M408.256 591.563a5.439 5.439 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 60",d:"M342.691 553.317h43.71v-21.855h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 61",d:"M397.328 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.811 2.811 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 62",d:"M408.256 464.531a2.967 2.967 0 0 1-.535-.055 2.754 2.754 0 0 1-.514-.153 2.838 2.838 0 0 1-.471-.251 4.139 4.139 0 0 1-.415-.339 3.2 3.2 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.517 
2.968 2.968 0 0 1 .055-.535 3.152 3.152 0 0 1 .152-.514 2.874 2.874 0 0 1 .252-.47 2.633 2.633 0 0 1 .753-.754 2.837 2.837 0 0 1 .471-.251 2.753 2.753 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 4.019 4.019 0 0 1 .339.415 2.786 2.786 0 0 1 .251.47 2.864 2.864 0 0 1 .208 1.049 2.77 2.77 0 0 1-.8 1.934 4.139 4.139 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459m21.855-1.366a2.789 2.789 0 0 1-1.935-.8 4.162 4.162 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.519 2.789 2.789 0 0 1 .8-1.934 4.139 4.139 0 0 1 .415-.339 2.838 2.838 0 0 1 .471-.251 2.752 2.752 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 2.79 2.79 0 0 1 .8 1.934 3.069 3.069 0 0 1-.055.535 2.779 2.779 0 0 1-.153.514 3.885 3.885 0 0 1-.251.47 4.02 4.02 0 0 1-.339.415 4.138 4.138 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459",fillRule:"evenodd"}))))}},4002:(e,t,a)=>{a.d(t,{Z:()=>F});var l,r,n,c,m,h,d,i,f,s,o,E,g,p,x,R,v,w,u,M,y,Z,P,b,A,q,H,N,k,L,O,G,V,_,S,j,B=a(7294);function C(){return C=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...F}=e;return B.createElement("svg",C({xmlns:"http://www.w3.org/2000/svg",width:1129,height:663,viewBox:"0 0 1129 663","aria-labelledby":a},F),void 0===t?B.createElement("title",{id:a},"Focus on What Matters"):t?B.createElement("title",{id:a},t):null,l||(l=B.createElement("circle",{cx:321,cy:321,r:321,fill:"#f2f2f2"})),r||(r=B.createElement("ellipse",{cx:559,cy:635.5,rx:514,ry:27.5,fill:"#3f3d56"})),n||(n=B.createElement("ellipse",{cx:558,cy:627,rx:460,ry:22,opacity:.2})),c||(c=B.createElement("path",{fill:"#3f3d56",d:"M131 152.5h840v50H131z"})),m||(m=B.createElement("path",{d:"M131 608.83a21.67 21.67 0 0 0 21.67 21.67h796.66A21.67 21.67 0 0 0 971 608.83V177.5H131ZM949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",fill:"#3f3d56"})),h||(h=B.createElement("path",{d:"M949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",opacity:.2})),d||(d=B.createElement("circle",{cx:181,cy:147.5,r:13,fill:"#3f3d56"})),i||(i=B.createElement("circle",{cx:217,cy:147.5,r:13,fill:"#3f3d56"})),f||(f=B.createElement("circle",{cx:253,cy:147.5,r:13,fill:"#3f3d56"})),s||(s=B.createElement("rect",{x:168,y:213.5,width:337,height:386,rx:5.335,fill:"#606060"})),o||(o=B.createElement("rect",{x:603,y:272.5,width:284,height:22,rx:5.476,fill:"#2e8555"})),E||(E=B.createElement("rect",{x:537,y:352.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),g||(g=B.createElement("rect",{x:537,y:396.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),p||(p=B.createElement("rect",{x:537,y:440.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),x||(x=B.createElement("rect",{x:537,y:484.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),R||(R=B.createElement("rect",{x:865,y:552.5,width:88,height:26,rx:7.028,fill:"#3ecc5f"})),v||(v=B.createElement("path",{d:"M1053.103 506.116a30.114 30.114 0 0 0 3.983-15.266c0-13.797-8.544-24.98-19.083-24.98s-19.082 11.183-19.082 24.98a30.114 30.114 0 0 0 3.983 15.266 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 30.114 30.114 0 0 0-3.983 15.266c0 13.797 8.543 24.981 19.082 24.981s19.083-11.184 19.083-24.98a30.114 30.114 0 0 0-3.983-15.267 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 
0-30.532Z",fill:"#3f3d56"})),w||(w=B.createElement("ellipse",{cx:1038.003,cy:460.318,rx:19.083,ry:24.981,fill:"#3f3d56"})),u||(u=B.createElement("ellipse",{cx:1038.003,cy:429.786,rx:19.083,ry:24.981,fill:"#3f3d56"})),M||(M=B.createElement("path",{d:"M1109.439 220.845a91.61 91.61 0 0 0 7.106-10.461l-50.14-8.235 54.228.403a91.566 91.566 0 0 0 1.746-72.426l-72.755 37.742 67.097-49.321A91.413 91.413 0 1 0 965.75 220.845a91.458 91.458 0 0 0-10.425 16.67l65.087 33.814-69.4-23.292a91.46 91.46 0 0 0 14.738 85.837 91.406 91.406 0 1 0 143.689 0 91.418 91.418 0 0 0 0-113.03Z",fill:"#3ecc5f",fillRule:"evenodd"})),y||(y=B.createElement("path",{d:"M946.188 277.36a91.013 91.013 0 0 0 19.562 56.514 91.406 91.406 0 1 0 143.689 0c12.25-15.553-163.25-66.774-163.25-56.515Z",opacity:.1})),Z||(Z=B.createElement("path",{d:"M330.12 342.936h111.474v45.12H330.12Z",fill:"#fff",fillRule:"evenodd"})),P||(P=B.createElement("path",{d:"M229.263 490.241a26.51 26.51 0 0 1-22.963-13.27 26.51 26.51 0 0 0 22.963 39.812h26.541V490.24Z",fill:"#3ecc5f",fillRule:"evenodd"})),b||(b=B.createElement("path",{d:"m348.672 350.07 92.922-5.807v-13.27a26.54 26.54 0 0 0-26.541-26.542H295.616l-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746-3.317-5.746a3.83 3.83 0 0 0-6.636 0l-3.317 5.746-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746c-.03 0-.056.004-.086.004l-5.497-5.495a3.83 3.83 0 0 0-6.407 1.717l-1.817 6.773-6.89-1.847a3.83 3.83 0 0 0-4.691 4.693l1.844 6.891-6.77 1.814a3.832 3.832 0 0 0-1.72 6.41l5.497 5.497c0 .028-.004.055-.004.085l-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318a26.54 26.54 0 0 0 26.541 26.542h159.249a26.54 26.54 0 0 0 26.541-26.542V384.075l-92.922-5.807a14.126 14.126 0 0 1 0-28.197",fill:"#3ecc5f",fillRule:"evenodd"})),A||(A=B.createElement("path",{d:"M388.511 516.783h39.812V463.7h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),q||(q=B.createElement("path",{d:"M454.865 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.099-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.445-.446a6.624 6.624 0 1 0-11.397-6.564c-.196-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.101 0c-.197.05-.394.097-.59.152a6.628 6.628 0 1 0-11.398 6.564 26.528 26.528 0 1 0 44.232 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),H||(H=B.createElement("path",{d:"M401.782 437.158h39.812v-26.541h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),N||(N=B.createElement("path",{d:"M454.865 427.205a3.318 3.318 0 0 0 0-6.635 3.411 3.411 0 0 0-.424.042c-.026-.1-.049-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2 .688q-.11-.113-.224-.223a3.282 3.282 0 0 0 .672-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.672-1.982q.114-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .424.042",fill:"#44d860",fillRule:"evenodd"})),k||(k=B.createElement("path",{d:"M282.345 347.581a3.318 3.318 0 0 1-3.317-3.318 9.953 9.953 0 1 0-19.906 0 3.318 3.318 0 1 1-6.636 0 16.588 16.588 0 1 1 33.177 0 3.318 
3.318 0 0 1-3.318 3.318",fillRule:"evenodd"})),L||(L=B.createElement("path",{d:"M335.428 516.783h79.625a26.54 26.54 0 0 0 26.541-26.542v-92.895H361.97a26.54 26.54 0 0 0-26.542 26.542Z",fill:"#ffff50",fillRule:"evenodd"})),O||(O=B.createElement("path",{d:"M421.714 438.485h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.541h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0-66.106h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m0 26.294h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m19.88-122.607c-.016 0-.03-.008-.045-.007-4.1.14-6.04 4.241-7.753 7.86-1.786 3.783-3.168 6.242-5.432 6.167-2.506-.09-3.94-2.922-5.458-5.918-1.744-3.443-3.734-7.347-7.913-7.201-4.042.138-5.99 3.708-7.706 6.857-1.828 3.355-3.071 5.394-5.47 5.3-2.557-.093-3.916-2.395-5.488-5.06-1.753-2.967-3.78-6.304-7.878-6.19-3.973.137-5.925 3.166-7.648 5.84-1.822 2.826-3.098 4.549-5.527 4.447-2.618-.093-3.97-2.004-5.535-4.216-1.757-2.486-3.737-5.3-7.823-5.163-3.886.133-5.838 2.615-7.56 4.802-1.634 2.075-2.91 3.718-5.611 3.615a1.328 1.328 0 1 0-.096 2.654c4.004.134 6.032-2.389 7.793-4.628 1.562-1.985 2.91-3.698 5.564-3.789 2.556-.108 3.754 1.48 5.567 4.041 1.721 2.434 3.675 5.195 7.606 5.337 4.118.138 6.099-2.94 7.853-5.663 1.569-2.434 2.923-4.535 5.508-4.624 2.38-.088 3.674 1.792 5.5 4.885 1.722 2.916 3.671 6.22 7.68 6.365 4.147.143 6.15-3.477 7.895-6.682 1.511-2.77 2.938-5.388 5.466-5.475 2.38-.056 3.62 2.116 5.456 5.746 1.714 3.388 3.658 7.226 7.73 7.373l.224.004c4.066 0 5.996-4.08 7.704-7.689 1.511-3.198 2.942-6.21 5.397-6.334Z",fillRule:"evenodd"})),G||(G=B.createElement("path",{d:"M308.887 516.783h53.083V463.7h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),V||(V=B.createElement("path",{d:"M388.511 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.098-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.444-.446a6.624 6.624 0 1 0-11.397-6.564c-.197-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.102 0c-.196.05-.394.097-.59.152a6.628 6.628 0 1 0-11.397 6.564 26.528 26.528 0 1 0 44.231 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),_||(_=B.createElement("path",{d:"M308.887 437.158h53.083v-26.541h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),S||(S=B.createElement("path",{d:"M375.24 427.205a3.318 3.318 0 1 0 0-6.635 3.411 3.411 0 0 0-.423.042c-.026-.1-.05-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2.001.688q-.11-.113-.223-.223a3.282 3.282 0 0 0 .671-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.671-1.982q.113-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .423.042",fill:"#44d860",fillRule:"evenodd"})),j||(j=B.createElement("path",{d:"M388.511 329.334a3.603 3.603 0 0 1-.65-.067 3.344 3.344 0 0 1-.624-.185 3.447 3.447 0 0 1-.572-.306 5.027 5.027 0 0 1-.504-.411 3.887 3.887 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.602 3.602 0 0 1 .067-.65 3.826 3.826 0 0 1 .184-.624 3.489 3.489 0 0 1 .307-.57 3.197 3.197 0 0 1 .914-.916 3.447 3.447 0 0 1 .572-.305 3.344 3.344 0 0 1 .624-.186 3.07 3.07 0 0 1 1.3 0 3.223 3.223 0 0 1 1.195.49 5.028 5.028 0 0 1 .504.412 4.88 4.88 0 0 1 .411.504 3.382 3.382 0 0 1 .306.571 3.478 3.478 0 0 1 .252 1.274 3.364 3.364 0 0 1-.969 2.349 5.027 5.027 0 0 1-.504.411 3.306 3.306 0 0 1-1.845.558m26.542-1.66a3.388 3.388 
0 0 1-2.35-.968 5.042 5.042 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.387 3.387 0 0 1 .967-2.349 5.026 5.026 0 0 1 .505-.411 3.447 3.447 0 0 1 .572-.305 3.343 3.343 0 0 1 .623-.186 3.07 3.07 0 0 1 1.3 0 3.224 3.224 0 0 1 1.195.49 5.026 5.026 0 0 1 .504.412 3.388 3.388 0 0 1 .97 2.35 3.726 3.726 0 0 1-.067.65 3.374 3.374 0 0 1-.186.623 4.715 4.715 0 0 1-.305.57 4.88 4.88 0 0 1-.412.505 5.026 5.026 0 0 1-.504.412 3.305 3.305 0 0 1-1.844.557",fillRule:"evenodd"})))}},8391:(e,t,a)=>{a.r(t),a.d(t,{default:()=>g});var l=a(7294),r=a(6010),n=a(9960),c=a(2263),m=a(7961),h=a(7462);const d={features:"features_t9lD",featureSvg:"featureSvg_GfXr"},i=[{title:l.createElement("a",{href:"https://makinarocks.ai/"},"MakinaRocks"),Svg:a(4002).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"Sponsored by MakinaRocks"),"\uc774 \ud504\ub85c\uc81d\ud2b8\ub294 MakinaRocks\uc758 \uc9c0\uc6d0\uc744 \ubc1b\uc544 \uc81c\uc791\ub418\uc5c8\uc2b5\ub2c8\ub2e4.")},{title:l.createElement("a",{href:"https://mlops-for-mle.github.io/"},"MLOps for MLE"),Svg:a(9722).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"ML Engineer\ub97c \uc704\ud55c MLOps Release!"),"\uad6c\uae00\uc5d0\uc11c \uc81c\uc548\ud55c MLOps 0\ub2e8\uacc4\ub97c \uc9c1\uc811 \uad6c\ud604\ud558\uba70 MLOps \uac00 \ubb34\uc5c7\uc778\uc9c0 \uacf5\ubd80\ud560 \uc218 \uc788\ub294 \ud29c\ud1a0\ub9ac\uc5bc\uc744 \uc624\ud508\ud588\uc2b5\ub2c8\ub2e4!")}];function f(e){let{title:t,Svg:a,description:n}=e;return l.createElement("div",{className:(0,r.Z)("col col--6")},l.createElement("div",{className:"text--center"},l.createElement(a,{className:d.featureSvg,role:"img"})),l.createElement("div",{className:"text--center padding-horiz--md"},l.createElement("h3",null,t),l.createElement("p",null,n)))}function s(){return l.createElement("section",{className:d.features},l.createElement("div",{className:"container"},l.createElement("div",{className:"row"},i.map(((e,t)=>l.createElement(f,(0,h.Z)({key:t},e)))))))}const o={heroBanner:"heroBanner_qdFl",buttons:"buttons_AeoN"};function E(){const{siteConfig:e}=(0,c.Z)();return l.createElement("header",{className:(0,r.Z)("hero hero--primary",o.heroBanner)},l.createElement("div",{className:"container"},l.createElement("h1",{className:"hero__title"},e.title),l.createElement("p",{className:"hero__subtitle"},e.tagline),l.createElement("div",{className:o.buttons},l.createElement(n.Z,{className:"button button--secondary button--lg",to:"/docs/introduction/intro"},"Let's Start!"))))}function g(){const{siteConfig:e}=(0,c.Z)();return l.createElement(m.Z,{title:"MLOps for ALL",description:"Description will go into a meta tag in "},l.createElement(E,null),l.createElement("main",null,l.createElement(s,null)))}}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3237],{9722:(e,t,a)=>{a.d(t,{Z:()=>c});var l,r=a(7294);function n(){return n=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...c}=e;return r.createElement("svg",n({xmlns:"http://www.w3.org/2000/svg",width:1088,height:687.962,viewBox:"0 0 1088 687.962","aria-labelledby":a},c),void 0===t?r.createElement("title",{id:a},"Easy to Use"):t?r.createElement("title",{id:a},t):null,l||(l=r.createElement("g",{"data-name":"Group 12"},r.createElement("g",{"data-name":"Group 11"},r.createElement("path",{"data-name":"Path 83",d:"M961.81 454.442c-5.27 45.15-16.22 81.4-31.25 110.31-20 38.52-54.21 54.04-84.77 70.28a193.275 193.275 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 
3.99a657.282 657.282 0 0 0-104.09-13.16q-14.97-.675-29.97-.67c-15.42.02-293.07 5.29-360.67-131.57-16.69-33.76-28.13-75-32.24-125.27-11.63-142.12 52.29-235.46 134.74-296.47 155.97-115.41 369.76-110.57 523.43 7.88 102.36 78.9 198.2 198.31 179.02 362.74Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 84",d:"M930.56 564.752c-20 38.52-47.21 64.04-77.77 80.28a193.272 193.272 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.3 657.3 0 0 0-104.09-13.16q-14.97-.675-29.97-.67-23.13.03-46.25 1.72c-100.17 7.36-253.82-6.43-321.42-143.29L326 177.962l62.95 161.619 20.09 51.59 55.37-75.98L493 275.962l130.2 149.27 36.8-81.27 254.78 207.919 14.21 11.59Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 85",d:"m302 282.962 26-57 36 83-31-60Z",opacity:.1}),r.createElement("path",{"data-name":"Path 86",d:"M554.5 647.802q-14.97-.675-29.97-.67l-115.49-255.96Z",opacity:.1}),r.createElement("path",{"data-name":"Path 87",d:"M464.411 315.191 493 292.962l130 150-132-128Z",opacity:.1}),r.createElement("path",{"data-name":"Path 88",d:"M852.79 645.032a193.265 193.265 0 0 1-27.46 11.94L623.2 425.232Z",opacity:.1}),r.createElement("circle",{"data-name":"Ellipse 11",cx:3,cy:3,r:3,transform:"translate(479 98.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 12",cx:3,cy:3,r:3,transform:"translate(396 201.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 13",cx:2,cy:2,r:2,transform:"translate(600 220.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 14",cx:2,cy:2,r:2,transform:"translate(180 265.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 15",cx:2,cy:2,r:2,transform:"translate(612 96.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 16",cx:2,cy:2,r:2,transform:"translate(736 192.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 17",cx:2,cy:2,r:2,transform:"translate(858 344.962)",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 89",d:"M306 121.222h-2.76v-2.76h-1.48v2.76H299v1.478h2.76v2.759h1.48V122.7H306Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 90",d:"M848 424.222h-2.76v-2.76h-1.48v2.76H841v1.478h2.76v2.759h1.48V425.7H848Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 91",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 92",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",opacity:.1}),r.createElement("ellipse",{"data-name":"Ellipse 18",cx:544,cy:30,rx:544,ry:30,transform:"translate(0 583.962)",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 93",d:"M568 571.962c0 33.137-14.775 24-33 24s-33 9.137-33-24 33-96 33-96 33 62.863 33 96Z",fill:"#ff6584"}),r.createElement("path",{"data-name":"Path 94",d:"M550 584.641c0 15.062-6.716 10.909-15 10.909s-15 4.153-15-10.909 15-43.636 15-43.636 15 28.576 15 43.636Z",opacity:.1}),r.createElement("rect",{"data-name":"Rectangle 97",width:92,height:18,rx:9,transform:"translate(489 604.962)",fill:"#2f2e41"}),r.createElement("rect",{"data-name":"Rectangle 98",width:92,height:18,rx:9,transform:"translate(489 586.962)",fill:"#2f2e41"}),r.createElement("path",{"data-name":"Path 95",d:"M137 490.528c0 55.343 34.719 100.126 77.626 100.126",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 96",d:"M214.626 590.654c0-55.965 38.745-101.251 86.626-101.251",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 97",d:"M165.125 
495.545c0 52.57 22.14 95.109 49.5 95.109",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 98",d:"M214.626 590.654c0-71.511 44.783-129.377 100.126-129.377",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 99",d:"M198.3 591.36s11.009-.339 14.326-2.7 16.934-5.183 17.757-1.395 16.544 18.844 4.115 18.945-28.879-1.936-32.19-3.953-4.008-10.897-4.008-10.897Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 100",d:"M234.716 604.89c-12.429.1-28.879-1.936-32.19-3.953-2.522-1.536-3.527-7.048-3.863-9.591l-.368.014s.7 8.879 4.009 10.9 19.761 4.053 32.19 3.953c3.588-.029 4.827-1.305 4.759-3.2-.498 1.142-1.867 1.855-4.537 1.877Z",opacity:.2}),r.createElement("path",{"data-name":"Path 101",d:"M721.429 527.062c0 38.029 23.857 68.8 53.341 68.8",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 102",d:"M774.769 595.863c0-38.456 26.623-69.575 59.525-69.575",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 103",d:"M740.755 530.509c0 36.124 15.213 65.354 34.014 65.354",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 104",d:"M774.769 595.863c0-49.139 30.773-88.9 68.8-88.9",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 105",d:"M763.548 596.348s7.565-.233 9.844-1.856 11.636-3.562 12.2-.958 11.368 12.949 2.828 13.018-19.844-1.33-22.119-2.716-2.753-7.488-2.753-7.488Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 106",d:"M788.574 605.645c-8.54.069-19.844-1.33-22.119-2.716-1.733-1.056-2.423-4.843-2.654-6.59l-.253.01s.479 6.1 2.755 7.487 13.579 2.785 22.119 2.716c2.465-.02 3.317-.9 3.27-2.2-.343.788-1.283 1.278-3.118 1.293Z",opacity:.2}),r.createElement("path",{"data-name":"Path 107",d:"M893.813 618.699s11.36-1.729 14.5-4.591 16.89-7.488 18.217-3.667 19.494 17.447 6.633 19.107-30.153 1.609-33.835-.065-5.515-10.784-5.515-10.784Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 108",d:"M933.228 628.154c-12.86 1.659-30.153 1.609-33.835-.065-2.8-1.275-4.535-6.858-5.2-9.45l-.379.061s1.833 9.109 5.516 10.783 20.975 1.725 33.835.065c3.712-.479 4.836-1.956 4.529-3.906-.375 1.246-1.703 2.156-4.466 2.512Z",opacity:.2}),r.createElement("path",{"data-name":"Path 109",d:"M614.26 617.881s9.587-1.459 12.237-3.875 14.255-6.32 15.374-3.095 16.452 14.725 5.6 16.125-25.448 1.358-28.555-.055-4.656-9.1-4.656-9.1Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 110",d:"M647.524 625.856c-10.853 1.4-25.448 1.358-28.555-.055-2.367-1.076-3.827-5.788-4.39-7.976l-.32.051s1.547 7.687 4.655 9.1 17.7 1.456 28.555.055c3.133-.4 4.081-1.651 3.822-3.3-.314 1.057-1.435 1.825-3.767 2.125Z",opacity:.2}),r.createElement("path",{"data-name":"Path 111",d:"M122.389 613.09s7.463-1.136 9.527-3.016 11.1-4.92 11.969-2.409 12.808 11.463 4.358 12.553-19.811 1.057-22.23-.043-3.624-7.085-3.624-7.085Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 112",d:"M148.285 619.302c-8.449 1.09-19.811 1.057-22.23-.043-1.842-.838-2.979-4.506-3.417-6.209l-.249.04s1.2 5.984 3.624 7.085 13.781 1.133 22.23.043c2.439-.315 3.177-1.285 2.976-2.566-.246.818-1.119 1.416-2.934 1.65Z",opacity:.2}),r.createElement("path",{"data-name":"Path 113",d:"M383.7 601.318c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.118-36.793 93.694-36.793 93.08 6.573 93.08 36.793Z",opacity:.1}),r.createElement("path",{"data-name":"Path 114",d:"M383.7 593.881c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.114-36.8 93.69-36.8 93.084 6.576 93.084 36.8Z",fill:"#3f3d56"})),r.createElement("path",{"data-name":"Path 40",d:"M360.175 
475.732h91.791v37.153h-91.791Z",fill:"#fff",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 41",d:"M277.126 597.026a21.828 21.828 0 0 1-18.908-10.927 21.829 21.829 0 0 0 18.908 32.782h21.855v-21.855Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 42",d:"m375.451 481.607 76.514-4.782v-10.928a21.854 21.854 0 0 0-21.855-21.855h-98.347l-2.732-4.735a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.732-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.731-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.735h-.071l-4.526-4.525a3.153 3.153 0 0 0-5.276 1.414l-1.5 5.577-5.674-1.521a3.154 3.154 0 0 0-3.863 3.864l1.52 5.679-5.575 1.494a3.155 3.155 0 0 0-1.416 5.278l4.526 4.526v.07l-4.735 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.727a3.154 3.154 0 0 0 0 5.464l4.735 2.736-4.735 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.735a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.728a3.154 3.154 0 0 0 0 5.464l4.732 2.732a21.854 21.854 0 0 0 21.858 21.855h131.13a21.854 21.854 0 0 0 21.855-21.855v-87.42l-76.514-4.782a11.632 11.632 0 0 1 0-23.219",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 43",d:"M408.255 618.882h32.782v-43.71h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 44",d:"M462.893 591.563a5.438 5.438 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 45",d:"M419.183 553.317h32.782v-21.855h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 46",d:"M462.893 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.814 2.814 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 47",d:"M320.836 479.556a2.732 2.732 0 0 1-2.732-2.732 8.2 8.2 0 0 0-16.391 0 2.732 2.732 0 0 1-5.464 0 13.66 13.66 0 0 1 27.319 0 2.732 2.732 0 0 1-2.732 2.732",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 48",d:"M364.546 618.881h65.565a21.854 21.854 0 0 0 21.855-21.855v-76.492h-65.565a21.854 21.854 0 0 0-21.855 21.855Z",fill:"#ffff50",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 49",d:"M435.596 554.41h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0-54.434h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.652h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 
0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m16.369-100.959c-.013 0-.024-.007-.037-.005-3.377.115-4.974 3.492-6.384 6.472-1.471 3.114-2.608 5.139-4.473 5.078-2.064-.074-3.244-2.406-4.494-4.874-1.436-2.835-3.075-6.049-6.516-5.929-3.329.114-4.932 3.053-6.346 5.646-1.5 2.762-2.529 4.442-4.5 4.364-2.106-.076-3.225-1.972-4.52-4.167-1.444-2.443-3.112-5.191-6.487-5.1-3.272.113-4.879 2.606-6.3 4.808-1.5 2.328-2.552 3.746-4.551 3.662-2.156-.076-3.27-1.65-4.558-3.472-1.447-2.047-3.077-4.363-6.442-4.251-3.2.109-4.807 2.153-6.224 3.954-1.346 1.709-2.4 3.062-4.621 2.977a1.094 1.094 0 0 0-.079 2.186c3.3.11 4.967-1.967 6.417-3.81 1.286-1.635 2.4-3.045 4.582-3.12 2.1-.09 3.091 1.218 4.584 3.327 1.417 2 3.026 4.277 6.263 4.394 3.391.114 5.022-2.42 6.467-4.663 1.292-2 2.406-3.734 4.535-3.807 1.959-.073 3.026 1.475 4.529 4.022 1.417 2.4 3.023 5.121 6.324 5.241 3.415.118 5.064-2.863 6.5-5.5 1.245-2.282 2.419-4.437 4.5-4.509 1.959-.046 2.981 1.743 4.492 4.732 1.412 2.79 3.013 5.95 6.365 6.071h.185c3.348 0 4.937-3.36 6.343-6.331 1.245-2.634 2.423-5.114 4.444-5.216Z",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 50",d:"M342.691 618.882h43.71v-43.71h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 8",transform:"rotate(-14.98 2188.845 -1120.376)"},r.createElement("rect",{"data-name":"Rectangle 3",width:92.361,height:36.462,rx:2,fill:"#d8d8d8"}),r.createElement("g",{"data-name":"Group 2",transform:"translate(1.531 23.03)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 4",width:5.336,height:5.336,rx:1,transform:"translate(16.797)"}),r.createElement("rect",{"data-name":"Rectangle 5",width:5.336,height:5.336,rx:1,transform:"translate(23.12)"}),r.createElement("rect",{"data-name":"Rectangle 6",width:5.336,height:5.336,rx:1,transform:"translate(29.444)"}),r.createElement("rect",{"data-name":"Rectangle 7",width:5.336,height:5.336,rx:1,transform:"translate(35.768)"}),r.createElement("rect",{"data-name":"Rectangle 8",width:5.336,height:5.336,rx:1,transform:"translate(42.091)"}),r.createElement("rect",{"data-name":"Rectangle 9",width:5.336,height:5.336,rx:1,transform:"translate(48.415)"}),r.createElement("rect",{"data-name":"Rectangle 10",width:5.336,height:5.336,rx:1,transform:"translate(54.739)"}),r.createElement("rect",{"data-name":"Rectangle 11",width:5.336,height:5.336,rx:1,transform:"translate(61.063)"}),r.createElement("rect",{"data-name":"Rectangle 12",width:5.336,height:5.336,rx:1,transform:"translate(67.386)"}),r.createElement("path",{"data-name":"Path 51",d:"M1.093 0h13.425a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0ZM75 0h13.426a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H75a1.093 1.093 0 0 1-1.093-1.093v-3.15A1.093 1.093 0 0 1 75 0Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 3",transform:"translate(1.531 10.261)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 52",d:"M1.093 0h5.125A1.093 1.093 0 0 1 7.31 1.093v3.149a1.093 1.093 0 0 1-1.092 1.093H1.093A1.093 1.093 0 0 1 0 4.242V1.093A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 13",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 14",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 15",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 
16",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 17",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 18",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 19",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 20",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 21",width:5.336,height:5.336,rx:1,transform:"translate(58.888)"}),r.createElement("rect",{"data-name":"Rectangle 22",width:5.336,height:5.336,rx:1,transform:"translate(65.212)"}),r.createElement("rect",{"data-name":"Rectangle 23",width:5.336,height:5.336,rx:1,transform:"translate(71.536)"}),r.createElement("rect",{"data-name":"Rectangle 24",width:5.336,height:5.336,rx:1,transform:"translate(77.859)"}),r.createElement("rect",{"data-name":"Rectangle 25",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 4",transform:"rotate(180 45.525 4.773)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 53",d:"M1.093 0h5.126a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 26",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 27",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 28",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 29",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 30",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 31",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 32",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 33",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 34",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 35",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 36",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 37",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 38",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("rect",{"data-name":"Rectangle 39",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 40",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 41",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 42",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 43",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 
44",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 45",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 46",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 47",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 48",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 49",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 50",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 51",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 6",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 54",d:"M2.624 16.584h7.3a1.093 1.093 0 0 1 1.092 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093h-7.3a1.093 1.093 0 0 1-1.092-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 5",transform:"translate(12.202 16.584)"},r.createElement("rect",{"data-name":"Rectangle 52",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 53",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 54",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 55",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("rect",{"data-name":"Rectangle 56",width:5.336,height:5.336,rx:1,transform:"translate(25.295)"}),r.createElement("rect",{"data-name":"Rectangle 57",width:5.336,height:5.336,rx:1,transform:"translate(31.619)"}),r.createElement("rect",{"data-name":"Rectangle 58",width:5.336,height:5.336,rx:1,transform:"translate(37.942)"}),r.createElement("rect",{"data-name":"Rectangle 59",width:5.336,height:5.336,rx:1,transform:"translate(44.265)"}),r.createElement("rect",{"data-name":"Rectangle 60",width:5.336,height:5.336,rx:1,transform:"translate(50.589)"}),r.createElement("rect",{"data-name":"Rectangle 61",width:5.336,height:5.336,rx:1,transform:"translate(56.912)"}),r.createElement("rect",{"data-name":"Rectangle 62",width:5.336,height:5.336,rx:1,transform:"translate(63.236)"})),r.createElement("path",{"data-name":"Path 55",d:"M83.053 16.584h6.906a1.093 1.093 0 0 1 1.091 1.093v3.15a1.093 1.093 0 0 1-1.091 1.093h-6.907a1.093 1.093 0 0 1-1.093-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 7",transform:"translate(1.531 29.627)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 63",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 64",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 65",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 66",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("path",{"data-name":"Path 56",d:"M26.387 0h30.422a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093H26.387a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 26.387 0Zm33.594 0h3.942a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093h-3.942a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 59.981 
0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 67",width:5.336,height:5.336,rx:1,transform:"translate(66.003)"}),r.createElement("rect",{"data-name":"Rectangle 68",width:5.336,height:5.336,rx:1,transform:"translate(72.327)"}),r.createElement("rect",{"data-name":"Rectangle 69",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("path",{"data-name":"Path 57",d:"M78.254 2.273v-1.18A1.093 1.093 0 0 1 79.347 0h3.15a1.093 1.093 0 0 1 1.093 1.093v1.18Z"}),r.createElement("path",{"data-name":"Path 58",d:"M83.591 3.063v1.18a1.093 1.093 0 0 1-1.093 1.093h-3.15a1.093 1.093 0 0 1-1.093-1.093v-1.18Z"})),r.createElement("rect",{"data-name":"Rectangle 70",width:88.927,height:2.371,rx:1.085,transform:"translate(1.925 1.17)",fill:"#4a4a4a"}),r.createElement("rect",{"data-name":"Rectangle 71",width:4.986,height:1.581,rx:.723,transform:"translate(4.1 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 72",width:4.986,height:1.581,rx:.723,transform:"translate(10.923 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 73",width:4.986,height:1.581,rx:.723,transform:"translate(16.173 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 74",width:4.986,height:1.581,rx:.723,transform:"translate(21.421 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 75",width:4.986,height:1.581,rx:.723,transform:"translate(26.671 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 76",width:4.986,height:1.581,rx:.723,transform:"translate(33.232 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 77",width:4.986,height:1.581,rx:.723,transform:"translate(38.48 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 78",width:4.986,height:1.581,rx:.723,transform:"translate(43.73 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 79",width:4.986,height:1.581,rx:.723,transform:"translate(48.978 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 80",width:4.986,height:1.581,rx:.723,transform:"translate(55.54 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 81",width:4.986,height:1.581,rx:.723,transform:"translate(60.788 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 82",width:4.986,height:1.581,rx:.723,transform:"translate(66.038 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 83",width:4.986,height:1.581,rx:.723,transform:"translate(72.599 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 84",width:4.986,height:1.581,rx:.723,transform:"translate(77.847 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 85",width:4.986,height:1.581,rx:.723,transform:"translate(83.097 1.566)",fill:"#d8d8d8",opacity:.136})),r.createElement("path",{"data-name":"Path 59",d:"M408.256 591.563a5.439 5.439 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 60",d:"M342.691 
553.317h43.71v-21.855h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 61",d:"M397.328 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.811 2.811 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 62",d:"M408.256 464.531a2.967 2.967 0 0 1-.535-.055 2.754 2.754 0 0 1-.514-.153 2.838 2.838 0 0 1-.471-.251 4.139 4.139 0 0 1-.415-.339 3.2 3.2 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.517 2.968 2.968 0 0 1 .055-.535 3.152 3.152 0 0 1 .152-.514 2.874 2.874 0 0 1 .252-.47 2.633 2.633 0 0 1 .753-.754 2.837 2.837 0 0 1 .471-.251 2.753 2.753 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 4.019 4.019 0 0 1 .339.415 2.786 2.786 0 0 1 .251.47 2.864 2.864 0 0 1 .208 1.049 2.77 2.77 0 0 1-.8 1.934 4.139 4.139 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459m21.855-1.366a2.789 2.789 0 0 1-1.935-.8 4.162 4.162 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.519 2.789 2.789 0 0 1 .8-1.934 4.139 4.139 0 0 1 .415-.339 2.838 2.838 0 0 1 .471-.251 2.752 2.752 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 2.79 2.79 0 0 1 .8 1.934 3.069 3.069 0 0 1-.055.535 2.779 2.779 0 0 1-.153.514 3.885 3.885 0 0 1-.251.47 4.02 4.02 0 0 1-.339.415 4.138 4.138 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459",fillRule:"evenodd"}))))}},4002:(e,t,a)=>{a.d(t,{Z:()=>F});var l,r,n,c,m,h,d,i,f,s,o,E,g,p,x,R,v,w,u,M,y,Z,P,b,A,q,H,N,k,L,O,G,V,_,S,j,B=a(7294);function C(){return C=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...F}=e;return B.createElement("svg",C({xmlns:"http://www.w3.org/2000/svg",width:1129,height:663,viewBox:"0 0 1129 663","aria-labelledby":a},F),void 0===t?B.createElement("title",{id:a},"Focus on What Matters"):t?B.createElement("title",{id:a},t):null,l||(l=B.createElement("circle",{cx:321,cy:321,r:321,fill:"#f2f2f2"})),r||(r=B.createElement("ellipse",{cx:559,cy:635.5,rx:514,ry:27.5,fill:"#3f3d56"})),n||(n=B.createElement("ellipse",{cx:558,cy:627,rx:460,ry:22,opacity:.2})),c||(c=B.createElement("path",{fill:"#3f3d56",d:"M131 152.5h840v50H131z"})),m||(m=B.createElement("path",{d:"M131 608.83a21.67 21.67 0 0 0 21.67 21.67h796.66A21.67 21.67 0 0 0 971 608.83V177.5H131ZM949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",fill:"#3f3d56"})),h||(h=B.createElement("path",{d:"M949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 
0-21.67-21.67Z",opacity:.2})),d||(d=B.createElement("circle",{cx:181,cy:147.5,r:13,fill:"#3f3d56"})),i||(i=B.createElement("circle",{cx:217,cy:147.5,r:13,fill:"#3f3d56"})),f||(f=B.createElement("circle",{cx:253,cy:147.5,r:13,fill:"#3f3d56"})),s||(s=B.createElement("rect",{x:168,y:213.5,width:337,height:386,rx:5.335,fill:"#606060"})),o||(o=B.createElement("rect",{x:603,y:272.5,width:284,height:22,rx:5.476,fill:"#2e8555"})),E||(E=B.createElement("rect",{x:537,y:352.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),g||(g=B.createElement("rect",{x:537,y:396.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),p||(p=B.createElement("rect",{x:537,y:440.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),x||(x=B.createElement("rect",{x:537,y:484.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),R||(R=B.createElement("rect",{x:865,y:552.5,width:88,height:26,rx:7.028,fill:"#3ecc5f"})),v||(v=B.createElement("path",{d:"M1053.103 506.116a30.114 30.114 0 0 0 3.983-15.266c0-13.797-8.544-24.98-19.083-24.98s-19.082 11.183-19.082 24.98a30.114 30.114 0 0 0 3.983 15.266 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 30.114 30.114 0 0 0-3.983 15.266c0 13.797 8.543 24.981 19.082 24.981s19.083-11.184 19.083-24.98a30.114 30.114 0 0 0-3.983-15.267 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532Z",fill:"#3f3d56"})),w||(w=B.createElement("ellipse",{cx:1038.003,cy:460.318,rx:19.083,ry:24.981,fill:"#3f3d56"})),u||(u=B.createElement("ellipse",{cx:1038.003,cy:429.786,rx:19.083,ry:24.981,fill:"#3f3d56"})),M||(M=B.createElement("path",{d:"M1109.439 220.845a91.61 91.61 0 0 0 7.106-10.461l-50.14-8.235 54.228.403a91.566 91.566 0 0 0 1.746-72.426l-72.755 37.742 67.097-49.321A91.413 91.413 0 1 0 965.75 220.845a91.458 91.458 0 0 0-10.425 16.67l65.087 33.814-69.4-23.292a91.46 91.46 0 0 0 14.738 85.837 91.406 91.406 0 1 0 143.689 0 91.418 91.418 0 0 0 0-113.03Z",fill:"#3ecc5f",fillRule:"evenodd"})),y||(y=B.createElement("path",{d:"M946.188 277.36a91.013 91.013 0 0 0 19.562 56.514 91.406 91.406 0 1 0 143.689 0c12.25-15.553-163.25-66.774-163.25-56.515Z",opacity:.1})),Z||(Z=B.createElement("path",{d:"M330.12 342.936h111.474v45.12H330.12Z",fill:"#fff",fillRule:"evenodd"})),P||(P=B.createElement("path",{d:"M229.263 490.241a26.51 26.51 0 0 1-22.963-13.27 26.51 26.51 0 0 0 22.963 39.812h26.541V490.24Z",fill:"#3ecc5f",fillRule:"evenodd"})),b||(b=B.createElement("path",{d:"m348.672 350.07 92.922-5.807v-13.27a26.54 26.54 0 0 0-26.541-26.542H295.616l-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746-3.317-5.746a3.83 3.83 0 0 0-6.636 0l-3.317 5.746-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746c-.03 0-.056.004-.086.004l-5.497-5.495a3.83 3.83 0 0 0-6.407 1.717l-1.817 6.773-6.89-1.847a3.83 3.83 0 0 0-4.691 4.693l1.844 6.891-6.77 1.814a3.832 3.832 0 0 0-1.72 6.41l5.497 5.497c0 .028-.004.055-.004.085l-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318a26.54 26.54 0 0 0 26.541 26.542h159.249a26.54 26.54 0 0 0 26.541-26.542V384.075l-92.922-5.807a14.126 14.126 0 
0 1 0-28.197",fill:"#3ecc5f",fillRule:"evenodd"})),A||(A=B.createElement("path",{d:"M388.511 516.783h39.812V463.7h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),q||(q=B.createElement("path",{d:"M454.865 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.099-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.445-.446a6.624 6.624 0 1 0-11.397-6.564c-.196-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.101 0c-.197.05-.394.097-.59.152a6.628 6.628 0 1 0-11.398 6.564 26.528 26.528 0 1 0 44.232 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),H||(H=B.createElement("path",{d:"M401.782 437.158h39.812v-26.541h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),N||(N=B.createElement("path",{d:"M454.865 427.205a3.318 3.318 0 0 0 0-6.635 3.411 3.411 0 0 0-.424.042c-.026-.1-.049-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2 .688q-.11-.113-.224-.223a3.282 3.282 0 0 0 .672-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.672-1.982q.114-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .424.042",fill:"#44d860",fillRule:"evenodd"})),k||(k=B.createElement("path",{d:"M282.345 347.581a3.318 3.318 0 0 1-3.317-3.318 9.953 9.953 0 1 0-19.906 0 3.318 3.318 0 1 1-6.636 0 16.588 16.588 0 1 1 33.177 0 3.318 3.318 0 0 1-3.318 3.318",fillRule:"evenodd"})),L||(L=B.createElement("path",{d:"M335.428 516.783h79.625a26.54 26.54 0 0 0 26.541-26.542v-92.895H361.97a26.54 26.54 0 0 0-26.542 26.542Z",fill:"#ffff50",fillRule:"evenodd"})),O||(O=B.createElement("path",{d:"M421.714 438.485h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.541h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0-66.106h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m0 26.294h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m19.88-122.607c-.016 0-.03-.008-.045-.007-4.1.14-6.04 4.241-7.753 7.86-1.786 3.783-3.168 6.242-5.432 6.167-2.506-.09-3.94-2.922-5.458-5.918-1.744-3.443-3.734-7.347-7.913-7.201-4.042.138-5.99 3.708-7.706 6.857-1.828 3.355-3.071 5.394-5.47 5.3-2.557-.093-3.916-2.395-5.488-5.06-1.753-2.967-3.78-6.304-7.878-6.19-3.973.137-5.925 3.166-7.648 5.84-1.822 2.826-3.098 4.549-5.527 4.447-2.618-.093-3.97-2.004-5.535-4.216-1.757-2.486-3.737-5.3-7.823-5.163-3.886.133-5.838 2.615-7.56 4.802-1.634 2.075-2.91 3.718-5.611 3.615a1.328 1.328 0 1 0-.096 2.654c4.004.134 6.032-2.389 7.793-4.628 1.562-1.985 2.91-3.698 5.564-3.789 2.556-.108 3.754 1.48 5.567 4.041 1.721 2.434 3.675 5.195 7.606 5.337 4.118.138 6.099-2.94 7.853-5.663 1.569-2.434 2.923-4.535 5.508-4.624 2.38-.088 3.674 1.792 5.5 4.885 1.722 2.916 3.671 6.22 7.68 6.365 4.147.143 6.15-3.477 7.895-6.682 1.511-2.77 2.938-5.388 5.466-5.475 2.38-.056 3.62 2.116 5.456 5.746 1.714 3.388 3.658 7.226 7.73 7.373l.224.004c4.066 0 5.996-4.08 7.704-7.689 1.511-3.198 2.942-6.21 5.397-6.334Z",fillRule:"evenodd"})),G||(G=B.createElement("path",{d:"M308.887 516.783h53.083V463.7h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),V||(V=B.createElement("path",{d:"M388.511 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.098-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.444-.446a6.624 6.624 0 1 0-11.397-6.564c-.197-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.102 0c-.196.05-.394.097-.59.152a6.628 6.628 0 1 0-11.397 6.564 26.528 26.528 0 1 0 
44.231 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),_||(_=B.createElement("path",{d:"M308.887 437.158h53.083v-26.541h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),S||(S=B.createElement("path",{d:"M375.24 427.205a3.318 3.318 0 1 0 0-6.635 3.411 3.411 0 0 0-.423.042c-.026-.1-.05-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2.001.688q-.11-.113-.223-.223a3.282 3.282 0 0 0 .671-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.671-1.982q.113-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .423.042",fill:"#44d860",fillRule:"evenodd"})),j||(j=B.createElement("path",{d:"M388.511 329.334a3.603 3.603 0 0 1-.65-.067 3.344 3.344 0 0 1-.624-.185 3.447 3.447 0 0 1-.572-.306 5.027 5.027 0 0 1-.504-.411 3.887 3.887 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.602 3.602 0 0 1 .067-.65 3.826 3.826 0 0 1 .184-.624 3.489 3.489 0 0 1 .307-.57 3.197 3.197 0 0 1 .914-.916 3.447 3.447 0 0 1 .572-.305 3.344 3.344 0 0 1 .624-.186 3.07 3.07 0 0 1 1.3 0 3.223 3.223 0 0 1 1.195.49 5.028 5.028 0 0 1 .504.412 4.88 4.88 0 0 1 .411.504 3.382 3.382 0 0 1 .306.571 3.478 3.478 0 0 1 .252 1.274 3.364 3.364 0 0 1-.969 2.349 5.027 5.027 0 0 1-.504.411 3.306 3.306 0 0 1-1.845.558m26.542-1.66a3.388 3.388 0 0 1-2.35-.968 5.042 5.042 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.387 3.387 0 0 1 .967-2.349 5.026 5.026 0 0 1 .505-.411 3.447 3.447 0 0 1 .572-.305 3.343 3.343 0 0 1 .623-.186 3.07 3.07 0 0 1 1.3 0 3.224 3.224 0 0 1 1.195.49 5.026 5.026 0 0 1 .504.412 3.388 3.388 0 0 1 .97 2.35 3.726 3.726 0 0 1-.067.65 3.374 3.374 0 0 1-.186.623 4.715 4.715 0 0 1-.305.57 4.88 4.88 0 0 1-.412.505 5.026 5.026 0 0 1-.504.412 3.305 3.305 0 0 1-1.844.557",fillRule:"evenodd"})))}},8391:(e,t,a)=>{a.r(t),a.d(t,{default:()=>g});var l=a(7294),r=a(6010),n=a(9960),c=a(2263),m=a(7961),h=a(7462);const d={features:"features_t9lD",featureSvg:"featureSvg_GfXr"},i=[{title:l.createElement("a",{href:"https://makinarocks.ai/"},"MakinaRocks"),Svg:a(4002).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"Sponsored by MakinaRocks"),"\uc774 \ud504\ub85c\uc81d\ud2b8\ub294 MakinaRocks\uc758 \uc9c0\uc6d0\uc744 \ubc1b\uc544 \uc81c\uc791\ub418\uc5c8\uc2b5\ub2c8\ub2e4.")},{title:l.createElement("a",{href:"https://mlops-for-mle.github.io/tutorial"},"MLOps for MLE"),Svg:a(9722).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"ML Engineer\ub97c \uc704\ud55c MLOps Release!"),"\uad6c\uae00\uc5d0\uc11c \uc81c\uc548\ud55c MLOps 0\ub2e8\uacc4\ub97c \uc9c1\uc811 \uad6c\ud604\ud558\uba70 MLOps \uac00 \ubb34\uc5c7\uc778\uc9c0 \uacf5\ubd80\ud560 \uc218 \uc788\ub294 \ud29c\ud1a0\ub9ac\uc5bc\uc744 \uc624\ud508\ud588\uc2b5\ub2c8\ub2e4!")}];function f(e){let{title:t,Svg:a,description:n}=e;return l.createElement("div",{className:(0,r.Z)("col col--6")},l.createElement("div",{className:"text--center"},l.createElement(a,{className:d.featureSvg,role:"img"})),l.createElement("div",{className:"text--center padding-horiz--md"},l.createElement("h3",null,t),l.createElement("p",null,n)))}function s(){return l.createElement("section",{className:d.features},l.createElement("div",{className:"container"},l.createElement("div",{className:"row"},i.map(((e,t)=>l.createElement(f,(0,h.Z)({key:t},e)))))))}const o={heroBanner:"heroBanner_qdFl",buttons:"buttons_AeoN"};function E(){const{siteConfig:e}=(0,c.Z)();return l.createElement("header",{className:(0,r.Z)("hero 
hero--primary",o.heroBanner)},l.createElement("div",{className:"container"},l.createElement("h1",{className:"hero__title"},e.title),l.createElement("p",{className:"hero__subtitle"},e.tagline),l.createElement("div",{className:o.buttons},l.createElement(n.Z,{className:"button button--secondary button--lg",to:"/docs/introduction/intro"},"Let's Start!"))))}function g(){const{siteConfig:e}=(0,c.Z)();return l.createElement(m.Z,{title:"MLOps for ALL",description:"Description will go into a meta tag in "},l.createElement(E,null),l.createElement("main",null,l.createElement(s,null)))}}}]); \ No newline at end of file diff --git a/assets/js/1e99a105.d6e8b39f.js b/assets/js/1e99a105.7a34c251.js similarity index 99% rename from assets/js/1e99a105.d6e8b39f.js rename to assets/js/1e99a105.7a34c251.js index d15fef8b..7f956e1a 100644 --- a/assets/js/1e99a105.d6e8b39f.js +++ b/assets/js/1e99a105.7a34c251.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7367],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),d=a,m=u["".concat(p,".").concat(d)]||u[d]||k[d]||i;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,o=new Array(i);o[0]=d;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const i={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,l={unversionedId:"prerequisites/docker/introduction",id:"version-1.0/prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/docs/1.0/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/introduction.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to 
Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/docs/1.0/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/docs/1.0/prerequisites/docker/"}},p={},s=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:2},{value:"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984",id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984",level:3},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:4},{value:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30",id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30",level:3},{value:"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"References",id:"references",level:3}],c={toc:s},u="wrapper";function k(e){let{components:t,...i}=e;return(0,a.kt)(u,(0,r.Z)({},c,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,a.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc11c\ube44\uc2a4\ud654\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378 \uac1c\ubc1c \uc678\uc5d0\ub3c4 \ub9ce\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ubd80\uac00\uc801\uc778")," \uae30\ub2a5\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ud559\uc2b5 \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ubaa8\ub378 \ud559\uc2b5 \uba85\ub839\uc758 \uc2a4\ucf00\uc904 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uc758 Reproducibility \ubcf4\uc7a5"))),(0,a.kt)("li",{parentName:"ol"},"\ubc30\ud3ec \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ud2b8\ub798\ud53d \ubd84\uc0b0"),(0,a.kt)("li",{parentName:"ul"},"\uc11c\ube44\uc2a4 \uc7a5\uc560 \ubaa8\ub2c8\ud130\ub9c1"),(0,a.kt)("li",{parentName:"ul"},"\uc7a5\uc560 \uc2dc \ud2b8\ub7ec\ube14\uc288\ud305")))),(0,a.kt)("p",null,"\ub2e4\ud589\ud788\ub3c4 \uc774\ub7f0 \uae30\ub2a5\ub4e4\uc5d0 \ub300\ud55c needs\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \ucabd\uc5d0\uc11c \uc774\ubbf8 \ub9ce\uc740 \uace0\ubbfc\uc744 \uac70\uccd0 \ubc1c\uc804\ub418\uc5b4 \uc654\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud560 \ub54c\ub3c4 \uc774\ub7f0 \uace0\ubbfc\uc758 \uacb0\uacfc\ubb3c\ub4e4\uc744 \ud65c\uc6a9\ud558\uba74 \ud070 \ub3c4\uc6c0\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nMLOps\uc5d0\uc11c \ub300\ud45c\uc801\uc73c\ub85c \ud65c\uc6a9\ud558\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uc774 \ubc14\ub85c \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ub3c4\ucee4\uc640 
\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("h3",{id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984"},"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984"),(0,a.kt)("p",null,"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uac01\uac01 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\ub2a5\uacfc \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158(Container Orchestration) \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ub300\ud45c \uc18c\ud504\ud2b8\uc6e8\uc5b4(\uc81c\ud488)\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,a.kt)("p",null,"\ub3c4\ucee4\ub294 \uacfc\uac70\uc5d0 \ub300\uc138\uc600\uc9c0\ub9cc \uc720\ub8cc\ud654 \uad00\ub828 \uc815\ucc45\ub4e4\uc744 \ud558\ub098\uc529 \ucd94\uac00\ud558\uba74\uc11c \uc810\uc810 \uc0ac\uc6a9 \ube48\ub3c4\uac00 \ud558\ub77d\uc138\uc785\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc 2022\ub144 3\uc6d4 \uae30\uc900\uc73c\ub85c \uc544\uc9c1\uae4c\uc9c0\ub3c4 \uac00\uc7a5 \uc77c\ubc18\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2019.png",src:n(3193).Z,width:"1600",height:"900"})),(0,a.kt)("center",null," [from sysdig 2019] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2021.png",src:n(8475).Z,width:"750",height:"437"})),(0,a.kt)("center",null," [from sysdig 2021] "),(0,a.kt)("h4",{id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uc9c0\uae08\uae4c\uc9c0\ub294 \ube44\uad50 \ub300\uc0c1\uc870\ucc28 \uac70\uc758 \uc5c6\ub294 \uc81c\ud488\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"cncf-survey.png",src:n(6587).Z,width:"2048",height:"1317"})),(0,a.kt)("center",null," [from cncf survey] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"t4-ai.png",src:n(2781).Z,width:"926",height:"629"})),(0,a.kt)("center",null," [from t4.ai] "),(0,a.kt)("h3",{id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30"},(0,a.kt)("strong",{parentName:"h3"},"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30")),(0,a.kt)("h4",{id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucd08\uae30 \ub3c4\ucee4 \uac1c\ubc1c\uc2dc\uc5d0\ub294 Docker Engine\uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc758 \ud328\ud0a4\uc9c0"),"\uc5d0 API, CLI, \ub124\ud2b8\uc6cc\ud06c, \uc2a4\ud1a0\ub9ac\uc9c0 \ub4f1 \uc5ec\ub7ec \uae30\ub2a5\ub4e4\uc744 \ubaa8\ub450 \ud3ec\ud568\ud588\uc73c\ub098, ",(0,a.kt)("strong",{parentName:"p"},"MSA")," \uc758 \ucca0\ud559\uc744 \ub2f4\uc544 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc529 \ubd84\ub9ac"),"\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ucd08\uae30\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654\ub97c \uc704\ud574 Docker Engine\uc744 \ub0b4\uc7a5\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub3c4\ucee4 \ubc84\uc804\uc774 \uc5c5\ub370\uc774\ud2b8\ub420 \ub54c\ub9c8\ub2e4 Docker Engine \uc758 \uc778\ud130\ud398\uc774\uc2a4\uac00 \ubcc0\uacbd\ub418\uc5b4 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \ud06c\uac8c \uc601\ud5a5\uc744 \ubc1b\ub294 \uc77c\uc774 
\uacc4\uc18d\ud574\uc11c \ubc1c\uc0dd\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,a.kt)("p",null,"\uadf8\ub798\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc774\ub7f0 \ubd88\ud3b8\ud568\uc744 \ud574\uc18c"),"\ud558\uace0\uc790, \ub3c4\ucee4\ub97c \uc911\uc2ec\uc73c\ub85c \uad6c\uae00 \ub4f1 \ucee8\ud14c\uc774\ub108 \uae30\uc220\uc5d0 \uad00\uc2ec\uc788\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec \uc9d1\ub2e8"),"\ub4e4\uc774 \ud55c\ub370 \ubaa8\uc5ec ",(0,a.kt)("strong",{parentName:"p"},"Open Container Initiative,")," \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"OCI"),"\ub77c\ub294 \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud558\uc5ec \ucee8\ud14c\uc774\ub108\uc5d0 \uad00\ud55c ",(0,a.kt)("strong",{parentName:"p"},"\ud45c\uc900"),"\uc744 \uc815\ud558\ub294 \uc77c\ub4e4\uc744 \uc2dc\uc791\ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\uc5d0\uc11c\ub3c4 \uc778\ud130\ud398\uc774\uc2a4\ub97c ",(0,a.kt)("strong",{parentName:"p"},"\ud55c \ubc88 \ub354 \ubd84\ub9ac"),"\ud574\uc11c, OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\ub294 ",(0,a.kt)("strong",{parentName:"p"},"containerd"),"\ub77c\ub294 Container Runtime \ub97c \uac1c\ubc1c\ud558\uace0, ",(0,a.kt)("strong",{parentName:"p"},"dockerd")," \uac00 containerd \uc758 API \ub97c \ud638\ucd9c\ud558\ub3c4\ub85d \ucd94\uc0c1\ud654 \ub808\uc774\uc5b4\ub97c \ucd94\uac00\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7ec\ud55c \ud750\ub984\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c\ub3c4 \uc774\uc81c\ubd80\ud130\ub294 \ub3c4\ucee4\ub9cc\uc744 \uc9c0\uc6d0\ud558\uc9c0 \uc54a\uace0, ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900\uc744")," \uc900\uc218\ud558\uace0, \uc815\ud574\uc9c4 \uc2a4\ud399\uc744 \uc9c0\ud0a4\ub294 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc740 \ubb34\uc5c7\uc774\ub4e0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d, Container Runtime Interface, \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"CRI \uc2a4\ud399"),"\uc744 \ubc84\uc804 1.5\ubd80\ud130 \uc81c\uacf5\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"cri-o"},"CRI-O"),(0,a.kt)("p",null,"Red Hat, Intel, SUSE, IBM\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900+CRI \uc2a4\ud399\uc744")," \ub530\ub77c Kubernetes \uc804\uc6a9 Container Runtime \uc744 \ubaa9\uc801\uc73c\ub85c \uac1c\ubc1c\ud55c \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 Docker Engine \uc744 \ub514\ud3f4\ud2b8 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc0ac\uc6a9\ud574\uc654\uc9c0\ub9cc, \ub3c4\ucee4\uc758 API \uac00 ",(0,a.kt)("strong",{parentName:"p"},"CRI")," \uc2a4\ud399\uc5d0 \ub9de\uc9c0 \uc54a\uc544(",(0,a.kt)("em",{parentName:"p"},"OCI \ub294 \ub530\ub984"),") \ub3c4\ucee4\uc758 API\ub97c ",(0,a.kt)("strong",{parentName:"p"},"CRI"),"\uc640 \ud638\ud658\ub418\uac8c \ubc14\uafd4\uc8fc\ub294 ",(0,a.kt)("strong",{parentName:"p"},"dockershim"),"\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc790\uccb4\uc801\uc73c\ub85c \uac1c\ubc1c \ubc0f \uc9c0\uc6d0\ud574\uc654\uc5c8\ub294\ub370,(",(0,a.kt)("em",{parentName:"p"},"\ub3c4\ucee4 \uce21\uc774 \uc544\ub2c8\ub77c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uce21\uc5d0\uc11c 
\uc9c0\uc6d0\ud588\ub2e4\ub294 \uc810\uc774 \uad49\uc7a5\ud788 \ud070 \uc9d0\uc774\uc5c8\uc2b5\ub2c8\ub2e4."),") \uc774\uac78 \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,a.kt)("strong",{parentName:"p"},"v1.20 \ubd80\ud130\ub294 Deprecated\ud558\uace0,")," ",(0,a.kt)("strong",{parentName:"p"},"v1.23 \ubd80\ud130\ub294 \uc9c0\uc6d0\uc744 \ud3ec\uae30"),"\ud558\uae30\ub85c \uacb0\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"v1.23 \uc740 2021 \ub144 12\uc6d4 \ub9b4\ub9ac\uc988")),(0,a.kt)("p",null,"\uadf8\ub798\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 v1.23 \ubd80\ud130\ub294 \ub3c4\ucee4\ub97c native \ud558\uac8c \uc4f8 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc ",(0,a.kt)("strong",{parentName:"p"},"\uc0ac\uc6a9\uc790\ub4e4\uc740 \uc774\ub7f0 \ubcc0\ud654\uc5d0 \ud06c\uac8c \uad00\ub828\uc774 \uc788\uc9c4 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc65c\ub0d0\ud558\uba74 Docker Engine\uc744 \ud1b5\ud574 \ub9cc\ub4e4\uc5b4\uc9c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\uae30 \ub54c\ubb38\uc5d0, \ucfe0\ubc84\ub124\ud2f0\uc2a4\uac00 \uc5b4\ub5a4 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc774\ub8e8\uc5b4\uc838\uc788\ub4e0 \uc0ac\uc6a9 \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,a.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}k.isMDXComponent=!0},6587:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},3193:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},8475:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},2781:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7367],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),d=a,m=u["".concat(p,".").concat(d)]||u[d]||k[d]||i;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,o=new Array(i);o[0]=d;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const i={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,l={unversionedId:"prerequisites/docker/introduction",id:"version-1.0/prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/docs/1.0/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/introduction.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/docs/1.0/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/docs/1.0/prerequisites/docker/"}},p={},s=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:2},{value:"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984",id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984",level:3},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:4},{value:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30",id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30",level:3},{value:"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"\uc9c0\uae08\uc758 \ub3c4\ucee4 & 
\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"References",id:"references",level:3}],c={toc:s},u="wrapper";function k(e){let{components:t,...i}=e;return(0,a.kt)(u,(0,r.Z)({},c,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,a.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc11c\ube44\uc2a4\ud654\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378 \uac1c\ubc1c \uc678\uc5d0\ub3c4 \ub9ce\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ubd80\uac00\uc801\uc778")," \uae30\ub2a5\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ud559\uc2b5 \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ubaa8\ub378 \ud559\uc2b5 \uba85\ub839\uc758 \uc2a4\ucf00\uc904 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uc758 Reproducibility \ubcf4\uc7a5"))),(0,a.kt)("li",{parentName:"ol"},"\ubc30\ud3ec \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ud2b8\ub798\ud53d \ubd84\uc0b0"),(0,a.kt)("li",{parentName:"ul"},"\uc11c\ube44\uc2a4 \uc7a5\uc560 \ubaa8\ub2c8\ud130\ub9c1"),(0,a.kt)("li",{parentName:"ul"},"\uc7a5\uc560 \uc2dc \ud2b8\ub7ec\ube14\uc288\ud305")))),(0,a.kt)("p",null,"\ub2e4\ud589\ud788\ub3c4 \uc774\ub7f0 \uae30\ub2a5\ub4e4\uc5d0 \ub300\ud55c needs\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \ucabd\uc5d0\uc11c \uc774\ubbf8 \ub9ce\uc740 \uace0\ubbfc\uc744 \uac70\uccd0 \ubc1c\uc804\ub418\uc5b4 \uc654\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud560 \ub54c\ub3c4 \uc774\ub7f0 \uace0\ubbfc\uc758 \uacb0\uacfc\ubb3c\ub4e4\uc744 \ud65c\uc6a9\ud558\uba74 \ud070 \ub3c4\uc6c0\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nMLOps\uc5d0\uc11c \ub300\ud45c\uc801\uc73c\ub85c \ud65c\uc6a9\ud558\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uc774 \ubc14\ub85c \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("h3",{id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984"},"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984"),(0,a.kt)("p",null,"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uac01\uac01 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\ub2a5\uacfc \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158(Container Orchestration) \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ub300\ud45c \uc18c\ud504\ud2b8\uc6e8\uc5b4(\uc81c\ud488)\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,a.kt)("p",null,"\ub3c4\ucee4\ub294 \uacfc\uac70\uc5d0 \ub300\uc138\uc600\uc9c0\ub9cc \uc720\ub8cc\ud654 \uad00\ub828 \uc815\ucc45\ub4e4\uc744 \ud558\ub098\uc529 \ucd94\uac00\ud558\uba74\uc11c \uc810\uc810 \uc0ac\uc6a9 \ube48\ub3c4\uac00 \ud558\ub77d\uc138\uc785\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc 2022\ub144 3\uc6d4 \uae30\uc900\uc73c\ub85c \uc544\uc9c1\uae4c\uc9c0\ub3c4 \uac00\uc7a5 \uc77c\ubc18\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2019.png",src:n(3193).Z,width:"1600",height:"900"})),(0,a.kt)("center",null," [from sysdig 2019] 
"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2021.png",src:n(8475).Z,width:"750",height:"437"})),(0,a.kt)("center",null," [from sysdig 2021] "),(0,a.kt)("h4",{id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uc9c0\uae08\uae4c\uc9c0\ub294 \ube44\uad50 \ub300\uc0c1\uc870\ucc28 \uac70\uc758 \uc5c6\ub294 \uc81c\ud488\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"cncf-survey.png",src:n(6587).Z,width:"2048",height:"1317"})),(0,a.kt)("center",null," [from cncf survey] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"t4-ai.png",src:n(2781).Z,width:"926",height:"629"})),(0,a.kt)("center",null," [from t4.ai] "),(0,a.kt)("h3",{id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30"},(0,a.kt)("strong",{parentName:"h3"},"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30")),(0,a.kt)("h4",{id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucd08\uae30 \ub3c4\ucee4 \uac1c\ubc1c\uc2dc\uc5d0\ub294 Docker Engine\uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc758 \ud328\ud0a4\uc9c0"),"\uc5d0 API, CLI, \ub124\ud2b8\uc6cc\ud06c, \uc2a4\ud1a0\ub9ac\uc9c0 \ub4f1 \uc5ec\ub7ec \uae30\ub2a5\ub4e4\uc744 \ubaa8\ub450 \ud3ec\ud568\ud588\uc73c\ub098, ",(0,a.kt)("strong",{parentName:"p"},"MSA")," \uc758 \ucca0\ud559\uc744 \ub2f4\uc544 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc529 \ubd84\ub9ac"),"\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ucd08\uae30\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654\ub97c \uc704\ud574 Docker Engine\uc744 \ub0b4\uc7a5\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub3c4\ucee4 \ubc84\uc804\uc774 \uc5c5\ub370\uc774\ud2b8\ub420 \ub54c\ub9c8\ub2e4 Docker Engine \uc758 \uc778\ud130\ud398\uc774\uc2a4\uac00 \ubcc0\uacbd\ub418\uc5b4 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \ud06c\uac8c \uc601\ud5a5\uc744 \ubc1b\ub294 \uc77c\uc774 \uacc4\uc18d\ud574\uc11c \ubc1c\uc0dd\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,a.kt)("p",null,"\uadf8\ub798\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc774\ub7f0 \ubd88\ud3b8\ud568\uc744 \ud574\uc18c"),"\ud558\uace0\uc790, \ub3c4\ucee4\ub97c \uc911\uc2ec\uc73c\ub85c \uad6c\uae00 \ub4f1 \ucee8\ud14c\uc774\ub108 \uae30\uc220\uc5d0 \uad00\uc2ec\uc788\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec \uc9d1\ub2e8"),"\ub4e4\uc774 \ud55c\ub370 \ubaa8\uc5ec ",(0,a.kt)("strong",{parentName:"p"},"Open Container Initiative,")," \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"OCI"),"\ub77c\ub294 \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud558\uc5ec \ucee8\ud14c\uc774\ub108\uc5d0 \uad00\ud55c ",(0,a.kt)("strong",{parentName:"p"},"\ud45c\uc900"),"\uc744 \uc815\ud558\ub294 \uc77c\ub4e4\uc744 \uc2dc\uc791\ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\uc5d0\uc11c\ub3c4 \uc778\ud130\ud398\uc774\uc2a4\ub97c ",(0,a.kt)("strong",{parentName:"p"},"\ud55c \ubc88 \ub354 \ubd84\ub9ac"),"\ud574\uc11c, OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\ub294 ",(0,a.kt)("strong",{parentName:"p"},"containerd"),"\ub77c\ub294 Container Runtime \ub97c \uac1c\ubc1c\ud558\uace0, ",(0,a.kt)("strong",{parentName:"p"},"dockerd")," \uac00 containerd \uc758 API 
\ub97c \ud638\ucd9c\ud558\ub3c4\ub85d \ucd94\uc0c1\ud654 \ub808\uc774\uc5b4\ub97c \ucd94\uac00\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7ec\ud55c \ud750\ub984\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c\ub3c4 \uc774\uc81c\ubd80\ud130\ub294 \ub3c4\ucee4\ub9cc\uc744 \uc9c0\uc6d0\ud558\uc9c0 \uc54a\uace0, ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900\uc744")," \uc900\uc218\ud558\uace0, \uc815\ud574\uc9c4 \uc2a4\ud399\uc744 \uc9c0\ud0a4\ub294 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc740 \ubb34\uc5c7\uc774\ub4e0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d, Container Runtime Interface, \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"CRI \uc2a4\ud399"),"\uc744 \ubc84\uc804 1.5\ubd80\ud130 \uc81c\uacf5\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"cri-o"},"CRI-O"),(0,a.kt)("p",null,"Red Hat, Intel, SUSE, IBM\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900+CRI \uc2a4\ud399\uc744")," \ub530\ub77c Kubernetes \uc804\uc6a9 Container Runtime \uc744 \ubaa9\uc801\uc73c\ub85c \uac1c\ubc1c\ud55c \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 Docker Engine \uc744 \ub514\ud3f4\ud2b8 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc0ac\uc6a9\ud574\uc654\uc9c0\ub9cc, \ub3c4\ucee4\uc758 API \uac00 ",(0,a.kt)("strong",{parentName:"p"},"CRI")," \uc2a4\ud399\uc5d0 \ub9de\uc9c0 \uc54a\uc544(",(0,a.kt)("em",{parentName:"p"},"OCI \ub294 \ub530\ub984"),") \ub3c4\ucee4\uc758 API\ub97c ",(0,a.kt)("strong",{parentName:"p"},"CRI"),"\uc640 \ud638\ud658\ub418\uac8c \ubc14\uafd4\uc8fc\ub294 ",(0,a.kt)("strong",{parentName:"p"},"dockershim"),"\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc790\uccb4\uc801\uc73c\ub85c \uac1c\ubc1c \ubc0f \uc9c0\uc6d0\ud574\uc654\uc5c8\ub294\ub370,(",(0,a.kt)("em",{parentName:"p"},"\ub3c4\ucee4 \uce21\uc774 \uc544\ub2c8\ub77c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uce21\uc5d0\uc11c \uc9c0\uc6d0\ud588\ub2e4\ub294 \uc810\uc774 \uad49\uc7a5\ud788 \ud070 \uc9d0\uc774\uc5c8\uc2b5\ub2c8\ub2e4."),") \uc774\uac78 \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,a.kt)("strong",{parentName:"p"},"v1.20 \ubd80\ud130\ub294 Deprecated\ud558\uace0,")," ",(0,a.kt)("strong",{parentName:"p"},"v1.23 \ubd80\ud130\ub294 \uc9c0\uc6d0\uc744 \ud3ec\uae30"),"\ud558\uae30\ub85c \uacb0\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"v1.23 \uc740 2021 \ub144 12\uc6d4 \ub9b4\ub9ac\uc988")),(0,a.kt)("p",null,"\uadf8\ub798\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 v1.23 \ubd80\ud130\ub294 \ub3c4\ucee4\ub97c native \ud558\uac8c \uc4f8 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc ",(0,a.kt)("strong",{parentName:"p"},"\uc0ac\uc6a9\uc790\ub4e4\uc740 \uc774\ub7f0 \ubcc0\ud654\uc5d0 \ud06c\uac8c \uad00\ub828\uc774 \uc788\uc9c4 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc65c\ub0d0\ud558\uba74 Docker Engine\uc744 \ud1b5\ud574 \ub9cc\ub4e4\uc5b4\uc9c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\uae30 \ub54c\ubb38\uc5d0, \ucfe0\ubc84\ub124\ud2f0\uc2a4\uac00 \uc5b4\ub5a4 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc774\ub8e8\uc5b4\uc838\uc788\ub4e0 \uc0ac\uc6a9 \uac00\ub2a5\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,a.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}k.isMDXComponent=!0},6587:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},3193:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},8475:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},2781:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file diff --git a/assets/js/272bb263.94fb9f09.js b/assets/js/272bb263.853807d3.js similarity index 99% rename from assets/js/272bb263.94fb9f09.js rename to assets/js/272bb263.853807d3.js index 8b1b2848..d0dcd97a 100644 --- a/assets/js/272bb263.94fb9f09.js +++ b/assets/js/272bb263.853807d3.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6358],{3905:(e,n,a)=>{a.d(n,{Zo:()=>u,kt:()=>m});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=t.createContext({}),s=function(e){var n=t.useContext(i),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},u=function(e){var n=s(e.components);return t.createElement(i.Provider,{value:n},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,i=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),c=s(a),d=r,m=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return a?t.createElement(m,o(o({ref:n},u),{},{components:a})):t.createElement(m,o({ref:n},u))}));function m(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var 
l=a.length,o=new Array(l);o[0]=d;var p={};for(var i in n)hasOwnProperty.call(n,i)&&(p[i]=n[i]);p.originalType=e,p[c]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,p={unversionedId:"prerequisites/docker/command",id:"version-1.0/prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/docs/1.0/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/command.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/docs/1.0/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/docs/1.0/prerequisites/docker/images"}},i={},s=[{value:"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"9. Docker rm",id:"9-docker-rm",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:s},c="wrapper";function k(e){let{components:n,...a}=e;return(0,r.kt)(c,(0,t.Z)({},u,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," sudo \uc5c6\uc774 \uc0ac\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 \uc544\ub798 \uc0ac\uc774\ud2b8\ub97c \ucc38\uace0\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. 
Docker Pull"),(0,r.kt)("p",null,"docker image registry(\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub294 \uc800\uc7a5\uc18c)\ub85c\ubd80\ud130 Docker image \ub97c \ub85c\uceec\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc544\ub798 \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 docker pull\uc5d0\uc11c \uc0ac\uc6a9 \uac00\ub2a5\ud55c argument\ub4e4\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc54c \uc218 \uc788\ub294 \uac83\uc740 \ubc14\ub85c docker pull\uc740 \ub450 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"help\uc5d0\uc11c \ub098\uc628 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a"),", -",(0,r.kt)("inlineCode",{parentName:"p"},"q")," \uc635\uc158\uc744 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NAME \uc55e\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc9c1\uc811 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," \uc774\ubbf8\uc9c0\ub97c pull \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud574\uc11d\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu")," \ub77c\ub294 \uc774\ub984\uc744 \uac00\uc9c4 \uc774\ubbf8\uc9c0 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," \ud0dc\uadf8\uac00 \ub2ec\ub824\uc788\ub294 \uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc624\ub77c\ub294 \ub73b\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d, \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uba74 ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," \ub77c\ub294 \uc774\ub984\uc758 registry \uc5d0\uc11c ubuntu:18.04 \ub77c\ub294 image \ub97c \uc5ec\ub7ec\ubd84\uc758 \ub178\ud2b8\ubd81\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc 
\ubc1b\uac8c\ub429\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ucc38\uace0\uc0ac\ud56d",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\ud6c4 ",(0,r.kt)("a",{parentName:"li",href:"http://docker.io"},"docker.io")," \ub098 public \ud55c docker hub \uc640 \uac19\uc740 registry \ub300\uc2e0\uc5d0, \ud2b9\uc815 ",(0,r.kt)("strong",{parentName:"li"},"private")," \ud55c registry \uc5d0\uc11c docker image \ub97c \uac00\uc838\uc640\uc57c \ud558\ub294 \uacbd\uc6b0\uc5d0\ub294, ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," \uc744 \ud1b5\ud574\uc11c \ud2b9\uc815 registry \ub97c \ubc14\ub77c\ubcf4\ub3c4\ub85d \ud55c \ub4a4, docker pull \uc744 \uc218\ud589\ud558\ub294 \ud615\ud0dc\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ud639\uc740 insecure registry \ub97c \uc124\uc815\ud558\ub294 ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"\ubc29\uc548"),"\ub3c4 \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\ud3d0\uc1c4\ub9dd\uc5d0\uc11c docker image \ub97c ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," \ud30c\uc77c\uacfc \uac19\uc740 \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save")),", ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," \uc640 \uac19\uc740 \uba85\ub839\uc5b4\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0 \uc874\uc7ac\ud558\ub294 docker image \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"docker images\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \ub3c4\ucee4\ub97c \ucd5c\ucd08 \uc124\uce58 \ud6c4 \uc774 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"\uc904 \uc218 \uc788\ub294 argument\uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"-q"),"\ub97c \uc0ac\uc6a9\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," \ub9cc \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. 
Docker ps"),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"docker ps\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\uac00 \uc5c6\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \uc2e4\ud589\ub418\ub294 \ucee8\ud14c\uc774\ub108\uac00 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"docker run\uc744 \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc6b0\ub9ac\uac00 \ud655\uc778\ud574\uc57c \ud558\ub294 \uac83\uc740 \ubc14\ub85c docker run\uc740 \uc138 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"\uc9c1\uc811 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it")," : ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," \uc635\uc158 + ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," \uc635\uc158",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"container \ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 interactive \ud55c terminal \ub85c \uc811\uc18d\uc2dc\ucf1c\uc8fc\ub294 \uc635\uc158"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name")," : name",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 id \ub300\uc2e0, \uad6c\ubd84\ud558\uae30 \uc27d\ub3c4\ub85d \uc9c0\uc815\ud574\uc8fc\ub294 \uc774\ub984"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 \uc2e4\ud589\ud560 \ucee4\ub9e8\ub4dc\ub85c, ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," \ub294 bash \uc258\uc744 \uc5ec\ub294 \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ucee8\ud14c\uc774\ub108\ub97c \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774 \uc81c \uc55e\uc11c \ubc30\uc6e0\ub358 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\uc2e4\ud589\ub418\uace0 \uc788\ub294 \ucee8\ud14c\uc774\ub108\uac00 \ub098\uc628\ub2e4\uace0 \ud588\uc9c0\ub9cc \uc5b4\uc9f8\uc11c\uc778\uc9c0 \ubc29\uae08 \uc2e4\ud589\ud55c \ucee8\ud14c\uc774\ub108\uac00 \ubcf4\uc774\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uadf8 \uc774\uc720\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub294 \uae30\ubcf8\uac12\uc73c\ub85c \ud604\uc7ac 
\uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ubcf4\uc5ec\uc8fc\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \ubcf4\uace0 \uc2f6\ub2e4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," \uc635\uc158\uc744 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\ub3c4 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. Docker exec"),(0,r.kt)("p",null,"Docker \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uba85\ub839\uc744 \ub0b4\ub9ac\uac70\ub098, \ub0b4\ubd80\ub85c \uc811\uc18d\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," \uc635\uc158\uc740 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \uc2e4\ud589\uc2dc\ucf1c\uc11c, \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \uc811\uc18d \uc885\ub8cc\ub97c \ud558\ub354\ub77c\ub3c4, \uacc4\uc18d \uc2e4\ud589 \uc911\uc774 \ub418\ub3c4\ub85d \ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589\uc911\uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589 \uc911\uc784\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \uc2e4\ud589\uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc5d0 \uc811\uc18d\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"\uc774 \uc804\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run"),"\uacfc \ub3d9\uc77c\ud558\uac8c container \ub0b4\ubd80\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"\uc744 \ud1b5\ud574 \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. 
Docker logs"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc758 log\ub97c \ud655\uc778\ud558\ub294 \ucee4\ub9e8\ub4dc \uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c test \ub77c\ub294 \uc774\ub984\uc758 busybox \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub85c \uc2e4\ud589\ud558\uc5ec, 1\ucd08\uc5d0 \ud55c \ubc88\uc529 \ud604\uc7ac \uc2dc\uac04\uc744 \ucd9c\ub825\ud558\ub3c4\ub85d \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 log\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774\ub807\uac8c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc5ec\ud0dc\uae4c\uc9c0 \ucc0d\ud78c log \ubc16\uc5d0 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," \uc635\uc158\uc744 \uc774\uc6a9\ud574 \uacc4\uc18d watch \ud558\uba70 \ucd9c\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"\uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc911\ub2e8\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop")," \uc744 \ud1b5\ud574 \ub3c4\ucee4\ub97c \uc815\uc9c0\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ub2e4\uc2dc \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"\uc704\uc758 \uacb0\uacfc\uc640 \ube44\uad50\ud588\uc744 \ub54c demo2 \ucee8\ud14c\uc774\ub108\uac00 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\uc5d0\uc11c \uc0ac\ub77c\uc9c4 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub3c4 \uc815\uc9c0\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("h2",{id:"9-docker-rm"},"9. Docker rm"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc885\ub8cc\uac00 \ub41c \uc0c1\ud0dc\ub85c \uc788\uc2b5\ub2c8\ub2e4. 
\uadf8\ub798\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),"\ub97c \ud1b5\ud574\uc11c \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub3c4 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc65c \uc9c0\uc6cc\uc57c \ud560\uae4c\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\uc885\ub8cc\ub418\uc5b4 \uc788\ub294 \ub3c4\ucee4\uc5d0\ub294 \uc774\uc804\uc5d0 \uc0ac\uc6a9\ud55c \ub370\uc774\ud130\uac00 \uc544\uc9c1 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0 \ub0a8\uc544\uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub798\uc11c restart \ub4f1\uc744 \ud1b5\ud574\uc11c \ucee8\ud14c\uc774\ub108\ub97c \uc7ac\uc2dc\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc774 \uacfc\uc815\uc5d0\uc11c disk\ub97c \uc0ac\uc6a9\ud558\uac8c \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub798\uc11c \uc644\uc804\ud788 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \ucee8\ud14c\uc774\ub108\ub97c \uc9c0\uc6b0\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," \uba85\ub839\uc5b4\ub97c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud604\uc7ac \ucee8\ud14c\uc774\ub108\ub4e4\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 3\uac1c\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"demo3")," \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac1c\ub85c \uc904\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \uc0ad\uc81c\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. 
Docker rmi"),(0,r.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud604\uc7ac \uc5b4\ub5a4 \uc774\ubbf8\uc9c0\ub4e4\uc774 \ub85c\uceec\uc5d0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"\ub2e4\uc2dc ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images"),"\ub97c \uce60 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6358],{3905:(e,n,a)=>{a.d(n,{Zo:()=>u,kt:()=>m});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=t.createContext({}),s=function(e){var n=t.useContext(i),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},u=function(e){var n=s(e.components);return t.createElement(i.Provider,{value:n},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,i=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),c=s(a),d=r,m=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return a?t.createElement(m,o(o({ref:n},u),{},{components:a})):t.createElement(m,o({ref:n},u))}));function m(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=d;var p={};for(var i in n)hasOwnProperty.call(n,i)&&(p[i]=n[i]);p.originalType=e,p[c]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var 
t=a(7462),r=(a(7294),a(3905));const l={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,p={unversionedId:"prerequisites/docker/command",id:"version-1.0/prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/docs/1.0/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/command.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/docs/1.0/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/docs/1.0/prerequisites/docker/images"}},i={},s=[{value:"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"9. Docker rm",id:"9-docker-rm",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:s},c="wrapper";function k(e){let{components:n,...a}=e;return(0,r.kt)(c,(0,t.Z)({},u,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," sudo \uc5c6\uc774 \uc0ac\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 \uc544\ub798 \uc0ac\uc774\ud2b8\ub97c \ucc38\uace0\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. 
Docker Pull"),(0,r.kt)("p",null,"docker image registry(\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub294 \uc800\uc7a5\uc18c)\ub85c\ubd80\ud130 Docker image \ub97c \ub85c\uceec\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc544\ub798 \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 docker pull\uc5d0\uc11c \uc0ac\uc6a9 \uac00\ub2a5\ud55c argument\ub4e4\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc54c \uc218 \uc788\ub294 \uac83\uc740 \ubc14\ub85c docker pull\uc740 \ub450 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"help\uc5d0\uc11c \ub098\uc628 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a"),", -",(0,r.kt)("inlineCode",{parentName:"p"},"q")," \uc635\uc158\uc744 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NAME \uc55e\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc9c1\uc811 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," \uc774\ubbf8\uc9c0\ub97c pull \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud574\uc11d\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu")," \ub77c\ub294 \uc774\ub984\uc744 \uac00\uc9c4 \uc774\ubbf8\uc9c0 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," \ud0dc\uadf8\uac00 \ub2ec\ub824\uc788\ub294 \uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc624\ub77c\ub294 \ub73b\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d, \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uba74 ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," \ub77c\ub294 \uc774\ub984\uc758 registry \uc5d0\uc11c ubuntu:18.04 \ub77c\ub294 image \ub97c \uc5ec\ub7ec\ubd84\uc758 \ub178\ud2b8\ubd81\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc 
\ubc1b\uac8c\ub429\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ucc38\uace0\uc0ac\ud56d",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\ud6c4 ",(0,r.kt)("a",{parentName:"li",href:"http://docker.io"},"docker.io")," \ub098 public \ud55c docker hub \uc640 \uac19\uc740 registry \ub300\uc2e0\uc5d0, \ud2b9\uc815 ",(0,r.kt)("strong",{parentName:"li"},"private")," \ud55c registry \uc5d0\uc11c docker image \ub97c \uac00\uc838\uc640\uc57c \ud558\ub294 \uacbd\uc6b0\uc5d0\ub294, ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," \uc744 \ud1b5\ud574\uc11c \ud2b9\uc815 registry \ub97c \ubc14\ub77c\ubcf4\ub3c4\ub85d \ud55c \ub4a4, docker pull \uc744 \uc218\ud589\ud558\ub294 \ud615\ud0dc\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ud639\uc740 insecure registry \ub97c \uc124\uc815\ud558\ub294 ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"\ubc29\uc548"),"\ub3c4 \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\ud3d0\uc1c4\ub9dd\uc5d0\uc11c docker image \ub97c ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," \ud30c\uc77c\uacfc \uac19\uc740 \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save")),", ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," \uc640 \uac19\uc740 \uba85\ub839\uc5b4\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0 \uc874\uc7ac\ud558\ub294 docker image \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"docker images\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \ub3c4\ucee4\ub97c \ucd5c\ucd08 \uc124\uce58 \ud6c4 \uc774 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"\uc904 \uc218 \uc788\ub294 argument\uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"-q"),"\ub97c \uc0ac\uc6a9\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," \ub9cc \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. 
Docker ps"),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"docker ps\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\uac00 \uc5c6\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \uc2e4\ud589\ub418\ub294 \ucee8\ud14c\uc774\ub108\uac00 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"docker run\uc744 \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc6b0\ub9ac\uac00 \ud655\uc778\ud574\uc57c \ud558\ub294 \uac83\uc740 \ubc14\ub85c docker run\uc740 \uc138 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"\uc9c1\uc811 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it")," : ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," \uc635\uc158 + ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," \uc635\uc158",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"container \ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 interactive \ud55c terminal \ub85c \uc811\uc18d\uc2dc\ucf1c\uc8fc\ub294 \uc635\uc158"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name")," : name",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 id \ub300\uc2e0, \uad6c\ubd84\ud558\uae30 \uc27d\ub3c4\ub85d \uc9c0\uc815\ud574\uc8fc\ub294 \uc774\ub984"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 \uc2e4\ud589\ud560 \ucee4\ub9e8\ub4dc\ub85c, ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," \ub294 bash \uc258\uc744 \uc5ec\ub294 \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ucee8\ud14c\uc774\ub108\ub97c \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774 \uc81c \uc55e\uc11c \ubc30\uc6e0\ub358 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\uc2e4\ud589\ub418\uace0 \uc788\ub294 \ucee8\ud14c\uc774\ub108\uac00 \ub098\uc628\ub2e4\uace0 \ud588\uc9c0\ub9cc \uc5b4\uc9f8\uc11c\uc778\uc9c0 \ubc29\uae08 \uc2e4\ud589\ud55c \ucee8\ud14c\uc774\ub108\uac00 \ubcf4\uc774\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uadf8 \uc774\uc720\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub294 \uae30\ubcf8\uac12\uc73c\ub85c \ud604\uc7ac 
\uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ubcf4\uc5ec\uc8fc\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \ubcf4\uace0 \uc2f6\ub2e4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," \uc635\uc158\uc744 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\ub3c4 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. Docker exec"),(0,r.kt)("p",null,"Docker \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uba85\ub839\uc744 \ub0b4\ub9ac\uac70\ub098, \ub0b4\ubd80\ub85c \uc811\uc18d\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," \uc635\uc158\uc740 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \uc2e4\ud589\uc2dc\ucf1c\uc11c, \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \uc811\uc18d \uc885\ub8cc\ub97c \ud558\ub354\ub77c\ub3c4, \uacc4\uc18d \uc2e4\ud589 \uc911\uc774 \ub418\ub3c4\ub85d \ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589\uc911\uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589 \uc911\uc784\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \uc2e4\ud589\uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc5d0 \uc811\uc18d\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"\uc774 \uc804\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run"),"\uacfc \ub3d9\uc77c\ud558\uac8c container \ub0b4\ubd80\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"\uc744 \ud1b5\ud574 \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. 
Docker logs"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc758 log\ub97c \ud655\uc778\ud558\ub294 \ucee4\ub9e8\ub4dc \uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c test \ub77c\ub294 \uc774\ub984\uc758 busybox \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub85c \uc2e4\ud589\ud558\uc5ec, 1\ucd08\uc5d0 \ud55c \ubc88\uc529 \ud604\uc7ac \uc2dc\uac04\uc744 \ucd9c\ub825\ud558\ub3c4\ub85d \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 log\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774\ub807\uac8c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc5ec\ud0dc\uae4c\uc9c0 \ucc0d\ud78c log \ubc16\uc5d0 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," \uc635\uc158\uc744 \uc774\uc6a9\ud574 \uacc4\uc18d watch \ud558\uba70 \ucd9c\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"\uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc911\ub2e8\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop")," \uc744 \ud1b5\ud574 \ub3c4\ucee4\ub97c \uc815\uc9c0\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ub2e4\uc2dc \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"\uc704\uc758 \uacb0\uacfc\uc640 \ube44\uad50\ud588\uc744 \ub54c demo2 \ucee8\ud14c\uc774\ub108\uac00 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\uc5d0\uc11c \uc0ac\ub77c\uc9c4 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub3c4 \uc815\uc9c0\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("h2",{id:"9-docker-rm"},"9. Docker rm"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc885\ub8cc\uac00 \ub41c \uc0c1\ud0dc\ub85c \uc788\uc2b5\ub2c8\ub2e4. 
\uadf8\ub798\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),"\ub97c \ud1b5\ud574\uc11c \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub3c4 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc65c \uc9c0\uc6cc\uc57c \ud560\uae4c\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\uc885\ub8cc\ub418\uc5b4 \uc788\ub294 \ub3c4\ucee4\uc5d0\ub294 \uc774\uc804\uc5d0 \uc0ac\uc6a9\ud55c \ub370\uc774\ud130\uac00 \uc544\uc9c1 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0 \ub0a8\uc544\uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub798\uc11c restart \ub4f1\uc744 \ud1b5\ud574\uc11c \ucee8\ud14c\uc774\ub108\ub97c \uc7ac\uc2dc\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc774 \uacfc\uc815\uc5d0\uc11c disk\ub97c \uc0ac\uc6a9\ud558\uac8c \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub798\uc11c \uc644\uc804\ud788 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \ucee8\ud14c\uc774\ub108\ub97c \uc9c0\uc6b0\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," \uba85\ub839\uc5b4\ub97c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud604\uc7ac \ucee8\ud14c\uc774\ub108\ub4e4\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 3\uac1c\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"demo3")," \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac1c\ub85c \uc904\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \uc0ad\uc81c\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. 
Docker rmi"),(0,r.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud604\uc7ac \uc5b4\ub5a4 \uc774\ubbf8\uc9c0\ub4e4\uc774 \ub85c\uceec\uc5d0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"\ub2e4\uc2dc ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images"),"\ub97c \uce60 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/29c7a46b.7c918b5d.js b/assets/js/29c7a46b.1fcec701.js similarity index 99% rename from assets/js/29c7a46b.7c918b5d.js rename to assets/js/29c7a46b.1fcec701.js index 41229ef0..436a63de 100644 --- a/assets/js/29c7a46b.7c918b5d.js +++ b/assets/js/29c7a46b.1fcec701.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1612],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>f});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function p(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var l=r.createContext({}),u=function(e){var n=r.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):p(p({},n),e)),t},m=function(e){var n=u(e.components);return r.createElement(l.Provider,{value:n},e.children)},s="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),s=u(t),d=o,f=s["".concat(l,".").concat(d)]||s[d]||c[d]||a;return t?r.createElement(f,p(p({ref:n},m),{},{components:t})):r.createElement(f,p({ref:n},m))}));function f(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof 
e||o){var a=t.length,p=new Array(a);p[0]=d;var i={};for(var l in n)hasOwnProperty.call(n,l)&&(i[l]=n[l]);i.originalType=e,i[s]="string"==typeof e?e:o,p[1]=i;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var r=t(7462),o=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/basic-component",id:"version-1.0/kubeflow/basic-component",title:"4. Component - Write",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/docs/1.0/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/docs/1.0/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/docs/1.0/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],m={toc:u},s="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(s,(0,r.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"component"},"Component"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub97c \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component Contents) \uc791\uc131"),(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component Wrapper) \uc791\uc131")),(0,o.kt)("p",null,"\uc774\uc81c \uac01 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub294 \uc6b0\ub9ac\uac00 \ud754\ud788 \uc791\uc131\ud558\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ub2e4\ub974\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc22b\uc790\ub97c \uc785\ub825\uc73c\ub85c \ubc1b\uace0 \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud55c \ub4a4 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 
\uc774 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 \uc5d0\ub7ec\uac00 \ub098\uace0 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294\ub370 \uadf8 \uc774\uc720\ub294 \ucd9c\ub825\ud574\uc57c \ud560 ",(0,o.kt)("inlineCode",{parentName:"p"},"number"),"\uac00 \uc815\uc758\ub418\uc5b4 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),"\uc5d0\uc11c ",(0,o.kt)("inlineCode",{parentName:"p"},"number")," \uc640 \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c \uac12\ub4e4\uc740 ",(0,o.kt)("strong",{parentName:"p"},"Config"),"\ub85c \uc815\uc758\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c Config\ub4e4\uc740 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0\uc11c \uc804\ub2ec\uc774 \ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"\uc774\uc81c \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud560 \uc218 \uc788\ub3c4\ub85d \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubcc4\ub3c4\uc758 Config \uc5c6\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub85c \uac10\uc300 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,o.kt)("p",null,"\uc774\uc81c \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c Config\ub97c \ub798\ud37c\uc758 argument\ub85c \ucd94\uac00\ud569\ub2c8\ub2e4. \ub2e4\ub9cc, argument \ub9cc\uc744 \uc801\ub294 \uac83\uc774 \uc544\ub2c8\ub77c argument\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub3c4 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4. Kubeflow\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 Kubeflow \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud560 \ub54c, \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc758 \uc5f0\uacb0\uc5d0\uc11c \uc815\ud574\uc9c4 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \ud0c0\uc785\uc774 \uc77c\uce58\ud558\ub294\uc9c0 \uccb4\ud06c\ud569\ub2c8\ub2e4. \ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \ud544\uc694\ub85c \ud558\ub294 \uc785\ub825\uacfc \ub2e4\ub978 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc804\ub2ec\ubc1b\uc740 \ucd9c\ub825\uc758 \ud3ec\ub9f7\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0 \ud30c\uc774\ud504\ub77c\uc778 \uc0dd\uc131\uc744 \ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc81c \ub2e4\uc74c\uacfc \uac19\uc774 argument\uc640 \uadf8 \ud0c0\uc785, \uadf8\ub9ac\uace0 \ubc18\ud658\ud558\ub294 \ud0c0\uc785\uc744 \uc801\uc5b4\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc644\uc131\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ubc18\ud658 \uac12\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\uc740 json\uc5d0\uc11c \ud45c\ud604\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\ub4e4\ub9cc \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub300\ud45c\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\uba70 \uad8c\uc7a5\ud558\ub294 \ud0c0\uc785\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"int"),(0,o.kt)("li",{parentName:"ul"},"float"),(0,o.kt)("li",{parentName:"ul"},"str")),(0,o.kt)("p",null,"\ub9cc\uc57d \ub2e8\uc77c \uac12\uc774 \uc544\ub2cc \uc5ec\ub7ec \uac12\uc744 \ubc18\ud658\ud558\ub824\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"collections.namedtuple")," \uc744 \uc774\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ucc38\uace0 \ud558\uc2dc\uae38 \ubc14\ub78d\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c 2\ub85c \ub098\ub208 \ubaab\uacfc \ub098\uba38\uc9c0\ub97c \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud574\uc57c \ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func")," \ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub807\uac8c \ubcc0\ud658\ub41c \ud615\ud0dc\ub294 \ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\ub85c import \ud558\uc5ec\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,o.kt)("p",null,"\ub9cc\uc57d \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uacf5\uc720\ub97c \ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0 YAML \ud30c\uc77c\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uacf5\uc720\ud574\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \uc704\ud574\uc11c\ub294 \uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud55c \ub4a4 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file")," \uc744 \ud1b5\ud574 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6b0\uc120 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,o.kt)("p",null,"\uc791\uc131\ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
\ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,o.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ud30c\uc77c\uc744 \uacf5\uc720\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,o.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull "),": \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \ud658\uacbd \uc815\ubcf4\uac00 \ub2f4\uae34 \uc774\ubbf8\uc9c0\ub97c pull"),(0,o.kt)("li",{parentName:"ol"},"run ",(0,o.kt)("inlineCode",{parentName:"li"},"command"),": pull \ud55c \uc774\ubbf8\uc9c0\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4. ")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ub97c \uc608\uc2dc\ub85c \ub4e4\uc790\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," \uc758 default image \ub294 python:3.7 \uc774\ubbc0\ub85c \ud574\ub2f9 \uc774\ubbf8\uc9c0\ub97c \uae30\uc900\uc73c\ub85c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. 
"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,o.kt)("h2",{id:"references"},"References:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1612],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>f});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function p(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var l=r.createContext({}),u=function(e){var n=r.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):p(p({},n),e)),t},m=function(e){var n=u(e.components);return r.createElement(l.Provider,{value:n},e.children)},s="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),s=u(t),d=o,f=s["".concat(l,".").concat(d)]||s[d]||c[d]||a;return t?r.createElement(f,p(p({ref:n},m),{},{components:t})):r.createElement(f,p({ref:n},m))}));function f(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,p=new Array(a);p[0]=d;var i={};for(var l in n)hasOwnProperty.call(n,l)&&(i[l]=n[l]);i.originalType=e,i[s]="string"==typeof e?e:o,p[1]=i;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var r=t(7462),o=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/basic-component",id:"version-1.0/kubeflow/basic-component",title:"4. Component - Write",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/docs/1.0/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/docs/1.0/kubeflow/basic-requirements"},next:{title:"5. 
Pipeline - Write",permalink:"/docs/1.0/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],m={toc:u},s="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(s,(0,r.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"component"},"Component"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub97c \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component Contents) \uc791\uc131"),(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component Wrapper) \uc791\uc131")),(0,o.kt)("p",null,"\uc774\uc81c \uac01 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub294 \uc6b0\ub9ac\uac00 \ud754\ud788 \uc791\uc131\ud558\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ub2e4\ub974\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc22b\uc790\ub97c \uc785\ub825\uc73c\ub85c \ubc1b\uace0 \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud55c \ub4a4 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 \uc5d0\ub7ec\uac00 \ub098\uace0 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294\ub370 \uadf8 \uc774\uc720\ub294 \ucd9c\ub825\ud574\uc57c \ud560 ",(0,o.kt)("inlineCode",{parentName:"p"},"number"),"\uac00 \uc815\uc758\ub418\uc5b4 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),"\uc5d0\uc11c ",(0,o.kt)("inlineCode",{parentName:"p"},"number")," \uc640 \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c \uac12\ub4e4\uc740 ",(0,o.kt)("strong",{parentName:"p"},"Config"),"\ub85c \uc815\uc758\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. 
\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c Config\ub4e4\uc740 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0\uc11c \uc804\ub2ec\uc774 \ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"\uc774\uc81c \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud560 \uc218 \uc788\ub3c4\ub85d \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubcc4\ub3c4\uc758 Config \uc5c6\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub85c \uac10\uc300 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,o.kt)("p",null,"\uc774\uc81c \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c Config\ub97c \ub798\ud37c\uc758 argument\ub85c \ucd94\uac00\ud569\ub2c8\ub2e4. \ub2e4\ub9cc, argument \ub9cc\uc744 \uc801\ub294 \uac83\uc774 \uc544\ub2c8\ub77c argument\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub3c4 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4. Kubeflow\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 Kubeflow \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud560 \ub54c, \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc758 \uc5f0\uacb0\uc5d0\uc11c \uc815\ud574\uc9c4 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \ud0c0\uc785\uc774 \uc77c\uce58\ud558\ub294\uc9c0 \uccb4\ud06c\ud569\ub2c8\ub2e4. \ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \ud544\uc694\ub85c \ud558\ub294 \uc785\ub825\uacfc \ub2e4\ub978 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc804\ub2ec\ubc1b\uc740 \ucd9c\ub825\uc758 \ud3ec\ub9f7\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0 \ud30c\uc774\ud504\ub77c\uc778 \uc0dd\uc131\uc744 \ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc81c \ub2e4\uc74c\uacfc \uac19\uc774 argument\uc640 \uadf8 \ud0c0\uc785, \uadf8\ub9ac\uace0 \ubc18\ud658\ud558\ub294 \ud0c0\uc785\uc744 \uc801\uc5b4\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc644\uc131\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ubc18\ud658 \uac12\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\uc740 json\uc5d0\uc11c \ud45c\ud604\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\ub4e4\ub9cc \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub300\ud45c\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\uba70 \uad8c\uc7a5\ud558\ub294 \ud0c0\uc785\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"int"),(0,o.kt)("li",{parentName:"ul"},"float"),(0,o.kt)("li",{parentName:"ul"},"str")),(0,o.kt)("p",null,"\ub9cc\uc57d \ub2e8\uc77c \uac12\uc774 \uc544\ub2cc \uc5ec\ub7ec \uac12\uc744 \ubc18\ud658\ud558\ub824\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"collections.namedtuple")," \uc744 \uc774\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ucc38\uace0 \ud558\uc2dc\uae38 \ubc14\ub78d\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c 2\ub85c \ub098\ub208 \ubaab\uacfc \ub098\uba38\uc9c0\ub97c \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud574\uc57c \ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func")," \ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub807\uac8c \ubcc0\ud658\ub41c \ud615\ud0dc\ub294 \ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\ub85c import \ud558\uc5ec\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,o.kt)("p",null,"\ub9cc\uc57d \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uacf5\uc720\ub97c \ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0 YAML \ud30c\uc77c\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uacf5\uc720\ud574\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \uc704\ud574\uc11c\ub294 \uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud55c \ub4a4 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file")," \uc744 \ud1b5\ud574 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6b0\uc120 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,o.kt)("p",null,"\uc791\uc131\ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
\ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,o.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ud30c\uc77c\uc744 \uacf5\uc720\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,o.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull "),": \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \ud658\uacbd \uc815\ubcf4\uac00 \ub2f4\uae34 \uc774\ubbf8\uc9c0\ub97c pull"),(0,o.kt)("li",{parentName:"ol"},"run ",(0,o.kt)("inlineCode",{parentName:"li"},"command"),": pull \ud55c \uc774\ubbf8\uc9c0\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4. ")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ub97c \uc608\uc2dc\ub85c \ub4e4\uc790\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," \uc758 default image \ub294 python:3.7 \uc774\ubbc0\ub85c \ud574\ub2f9 \uc774\ubbf8\uc9c0\ub97c \uae30\uc900\uc73c\ub85c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. 
"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,o.kt)("h2",{id:"references"},"References:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/2a07449d.2be45199.js b/assets/js/2a07449d.737e4f29.js similarity index 99% rename from assets/js/2a07449d.2be45199.js rename to assets/js/2a07449d.737e4f29.js index b4efe730..0ca8be2c 100644 --- a/assets/js/2a07449d.2be45199.js +++ b/assets/js/2a07449d.737e4f29.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[200],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var l=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);t&&(l=l.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,l)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=l.createContext({}),s=function(e){var t=l.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return l.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return l.createElement(l.Fragment,{},t)}},m=l.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=a(e,["components","mdxType","originalType","parentName"]),u=s(n),m=i,f=u["".concat(p,".").concat(m)]||u[m]||d[m]||r;return n?l.createElement(f,o(o({ref:t},c),{},{components:n})):l.createElement(f,o({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:i,o[1]=a;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>s});var l=n(7462),i=(n(7294),n(3905));const r={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,a={unversionedId:"introduction/levels",id:"introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/docs/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/docs/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/levels.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/docs/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/docs/introduction/component"}},p={},s=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4",id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4",level:2},{value:"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654",id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654",id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,i.kt)(u,(0,l.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ubc1c\ud45c\ud55c MLOps\uc758 \ub2e8\uacc4\ub97c \ubcf4\uba70 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\uc740 \ubb34\uc5c7\uc778\uc9c0 \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \ubb34\ub824 2015\ub144\ubd80\ud130 MLOps\uc758 \ud544\uc694\uc131\uc744 \ub9d0\ud588\uc2b5\ub2c8\ub2e4. Hidden Technical Debt in Machine Learning Systems \uc740 \uadf8\ub7f0 \uad6c\uae00\uc758 \uc0dd\uac01\uc744 \ub2f4\uc740 \ub17c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper",src:n(7097).Z,width:"840",height:"638"})),(0,i.kt)("p",null,"\uc774 \ub17c\ubb38\uc758 \ud575\uc2ec\uc740 \ubc14\ub85c \uba38\uc2e0\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc81c\ud488\uc744 \ub9cc\ub4dc\ub294\ub370 \uc788\uc5b4\uc11c \uba38\uc2e0\ub7ec\ub2dd \ucf54\ub4dc\ub294 \uc804\uccb4 \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc544\uc8fc \uc77c\ubd80\uc77c \ubfd0\uc774\ub77c\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper-2",src:n(3110).Z,width:"1186",height:"422"})),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \uc774 \ub17c\ubb38\uc744 \ub354 \ubc1c\uc804\uc2dc\ucf1c\uc11c MLOps\ub77c\ub294 \uc6a9\uc5b4\ub97c \ub9cc\ub4e4\uc5b4 \ud655\uc7a5\uc2dc\ucf30\uc2b5\ub2c8\ub2e4. \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,i.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"\uad6c\uae00 \ud074\ub77c\uc6b0\ub4dc \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ubc88 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ub9d0\ud558\ub294 MLOps\ub780 \uc5b4\ub5a4 \uac83\uc778\uc9c0\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud574\ubcf4\uace0\uc790 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uad6c\uae00\uc5d0\uc11c\ub294 MLOps\uc758 \ubc1c\uc804 \ub2e8\uacc4\ub97c \ucd1d 3(0~2)\ub2e8\uacc4\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4. 
\uac01 \ub2e8\uacc4\ub4e4\uc5d0 \ub300\ud574 \uc124\uba85\ud558\uae30 \uc55e\uc11c \uc774\uc804 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c \uc124\uba85\ud588\ub358 \uac1c\ub150 \uc911 \ud544\uc694\ud55c \ubd80\ubd84\uc744 \ub2e4\uc2dc \ud55c\ubc88 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc6b4\uc601\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uacfc \ubc30\ud3ec \ubc0f \uc6b4\uc601\uc744 \ub2f4\ub2f9\ud558\ub294 \uc6b4\uc601\ud300\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub450 \ud300\uc758 \uc6d0\ud560\ud55c \ud611\uc5c5\uc744 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud558\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc804\uc5d0\ub294 \uac04\ub2e8\ud788 Continuous Integration(CI)/Continuous Deployment(CD)\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\ub2e4\uace0 \ud558\uc600\ub294\ub370, \uc5b4\ub5bb\uac8c CI/CD\ub97c \ud558\ub294\uc9c0\uc5d0 \ub300\ud574\uc11c \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4"},"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-0",src:n(5833).Z,width:"1332",height:"494"})),(0,i.kt)("p",null,"0\ub2e8\uacc4\uc5d0\uc11c \ub450 \ud300\uc740 \u201c\ubaa8\ub378\u201d\uc744 \ud1b5\ud574 \uc18c\ud1b5\ud569\ub2c8\ub2e4. \uba38\uc2e0 \ub7ec\ub2dd\ud300\uc740 \uc313\uc5ec\uc788\ub294 \ub370\uc774\ud130\ub85c \ubaa8\ub378\uc744 \ud559\uc2b5\uc2dc\ud0a4\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc6b4\uc601\ud300\uc5d0\uac8c \uc804\ub2ec \ud569\ub2c8\ub2e4. \uc6b4\uc601\ud300\uc740 \uc774\ub807\uac8c \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"toon",src:n(9869).Z,width:"1282",height:"1746"})),(0,i.kt)("p",null,"\ucd08\uae30\uc758 \uba38\uc2e0 \ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc774 \u201c\ubaa8\ub378\u201d \uc911\uc2ec\uc758 \uc18c\ud1b5\uc744 \ud1b5\ud574 \ubc30\ud3ec\ud569\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uc774\ub7f0 \ubc30\ud3ec \ubc29\uc2dd\uc740 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.7\uc744 \uc4f0\uace0 \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.8\uc744 \uc4f4\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0c1\ud669\uc744 \uc790\uc8fc \ubaa9\uaca9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \uc0c1\ud669\uc774 \uc77c\uc5b4\ub098\ub294 \uc774\uc720\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud2b9\uc131\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. \ud559\uc2b5\ub41c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uae30 \uc704\ud574\uc11c\ub294 3\uac00\uc9c0\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"\ud30c\uc774\uc36c \ucf54\ub4dc"),(0,i.kt)("li",{parentName:"ol"},"\ud559\uc2b5\ub41c \uac00\uc911\uce58"),(0,i.kt)("li",{parentName:"ol"},"\ud658\uacbd (\ud328\ud0a4\uc9c0, \ubc84\uc804 \ub4f1)")),(0,i.kt)("p",null,"\ub9cc\uc57d \uc774 3\uac00\uc9c0 \uc911 \ud55c \uac00\uc9c0\ub77c\ub3c4 \uc804\ub2ec\uc774 \uc798\ubabb \ub41c\ub2e4\uba74 \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\uac70\ub098 \uc608\uc0c1\ud558\uc9c0 \ubabb\ud55c \uc608\uce21\uc744 \ud560\uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uadf8\ub7f0\ub370 \ub9ce\uc740 \uacbd\uc6b0 \ud658\uacbd\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc544\uc11c \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc740 \ub2e4\uc591\ud55c \uc624\ud508\uc18c\uc2a4\ub97c \uc0ac\uc6a9\ud558\ub294\ub370 \uc624\ud508\uc18c\uc2a4\ub294 \ud2b9\uc131\uc0c1 \uc5b4\ub5a4 \ubc84\uc804\uc744 \uc4f0\ub294\uc9c0\uc5d0 \ub530\ub77c\uc11c \uac19\uc740 \ud568\uc218\ub77c\ub3c4 \uacb0\uacfc\uac00 \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \ubb38\uc81c\ub294 \uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uad00\ub9ac\ud560 \ubaa8\ub378\uc774 \ub9ce\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \uae08\ubc29 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\ub4e4\uc774 \ub9ce\uc544\uc9c0\uace0 \uc11c\ub85c \uc18c\ud1b5\uc5d0 \uc5b4\ub824\uc6c0\uc744 \uacaa\uac8c \ub41c\ub2e4\uba74 \uc131\ub2a5\uc774 \ub354 \uc88b\uc740 \ubaa8\ub378\uc744 \ube60\ub974\uac8c \ubc30\ud3ec\ud560 \uc218 \uc5c6\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654"},"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654"),(0,i.kt)("h3",{id:"pipeline"},"Pipeline"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-pipeline",src:n(287).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub798\uc11c MLOps\uc5d0\uc11c\ub294 \u201c\ud30c\uc774\ud504\ub77c\uc778(Pipeline)\u201d\uc744 \uc774\uc6a9\ud574 \uc774\ub7ec\ud55c \ubb38\uc81c\ub97c \ubc29\uc9c0\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4. MLOps\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub3c4\ucee4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc774\uc6a9\ud574 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ubaa8\ub378 \uac1c\ubc1c\uc5d0 \uc0ac\uc6a9\ud55c \uac83\uacfc \ub3d9\uc77c\ud55c \ud658\uacbd\uc73c\ub85c \ub3d9\uc791\ub418\ub294 \uac83\uc744 \ubcf4\uc7a5\ud569\ub2c8\ub2e4. \uc774\ub97c \ud1b5\ud574\uc11c \ud658\uacbd\uc774 \ub2ec\ub77c\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub7f0\ub370 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubc94\uc6a9\uc801\uc778 \uc6a9\uc5b4\ub85c \uc5ec\ub7ec \ub2e4\uc591\ud55c \ud0dc\uc2a4\ud06c\uc5d0\uc11c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc5ed\ud560\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4. \uadf8\ub798\uc11c \ud30c\uc774\ud504\ub77c\uc778 \ub300\uc2e0 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778(Training Pipeline)\uc774 \ub354 \uc815\ud655\ud558\ub2e4\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-ct.png",src:n(1242).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 Continuous Training(CT) \uac1c\ub150\uc774 \ucd94\uac00\ub429\ub2c8\ub2e4. 
\uadf8\ub807\ub2e4\uba74 CT\ub294 \uc65c \ud544\uc694\ud560\uae4c\uc694?"),(0,i.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,i.kt)("p",null,"Real World\uc5d0\uc11c \ub370\uc774\ud130\ub294 Data Shift\ub77c\ub294 \ub370\uc774\ud130\uc758 \ubd84\ud3ec\uac00 \uacc4\uc18d\ud574\uc11c \ubcc0\ud558\ub294 \ud2b9\uc9d5\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub798\uc11c \uacfc\uac70\uc5d0 \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 \uc2dc\uac04\uc774 \uc9c0\ub0a8\uc5d0 \ub530\ub77c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc800\ud558\ub418\ub294 \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uac00\uc7a5 \uac04\ub2e8\ud558\uace0 \ud6a8\uacfc\uc801\uc778 \ud574\uacb0\ucc45\uc740 \ubc14\ub85c \ucd5c\uadfc \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \ubcc0\ud654\ub41c \ub370\uc774\ud130 \ubd84\ud3ec\uc5d0 \ub9de\ucdb0\uc11c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\uba74 \ub2e4\uc2dc \uc900\uc218\ud55c \uc131\ub2a5\uc744 \ub0bc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc81c\uc870\uc5c5\uacfc \uac19\uc774 \ud55c \uacf5\uc7a5\uc5d0\uc11c \uc5ec\ub7ec \ub808\uc2dc\ud53c\ub97c \ucc98\ub9ac\ud558\ub294 \uacbd\uc6b0 \ubb34\uc870\uac74 \uc7ac\ud559\uc2b5\uc744 \ud558\ub294 \uac83\uc774 \uc88b\uc9c0 \uc54a\uc744 \uc218 \ub3c4 \uc788\uc2b5\ub2c8\ub2e4. Blind Spot\uc774 \ub300\ud45c\uc801\uc778 \uc608\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \uc790\ub3d9\ucc28 \uc0dd\uc0b0 \ub77c\uc778\uc5d0\uc11c \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uace0 \uc774\ub97c \uc774\uc6a9\ud574 \uc608\uce21\uc744 \uc9c4\ud589\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. \ub9cc\uc57d \uc804\ud600 \ub2e4\ub978 \ubaa8\ub378 B\uac00 \ub4e4\uc5b4\uc624\uba74 \uc774\uc804\uc5d0 \ubcf4\uc9c0 \ubabb\ud55c \ub370\uc774\ud130 \ud328\ud134\uc774\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\uc81c \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5c8\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378\uc740 \uc608\uce21\uc744 \uc9c4\ud589\ud560 \uac83 \uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d \ub370\uc774\ud130\uac00 \ub2e4\uc2dc \ubaa8\ub378 A\ub85c \ubc14\ub010\ub2e4\uba74 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d Retraining \uaddc\uce59\ub9cc \uc788\ub2e4\uba74 \ub2e4\uc2dc \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\uac8c \ub429\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ucda9\ubd84\ud55c \uc131\ub2a5\uc744 \ubcf4\uc774\uae30 \uc704\ud574\uc11c\ub294 \ucda9\ubd84\ud55c \uc591\uc758 \ub370\uc774\ud130\uac00 \ubaa8\uc5ec\uc57c \ud569\ub2c8\ub2e4. Blind Spot\uc774\ub780 \uc774\ub807\uac8c \ub370\uc774\ud130\ub97c \ubaa8\uc73c\uae30 \uc704\ud574\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uad6c\uac04\uc744 \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c Blind Spot\uc744 \ud574\uacb0\ud558\ub294 \ubc29\ubc95\uc740 \uac04\ub2e8\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ubc14\ub85c \ubaa8\ub378 A\uc5d0 \ub300\ud55c \ubaa8\ub378\uc774 \uacfc\uac70\uc5d0 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uace0 \ub9cc\uc57d \uc788\uc5c8\ub2e4\uba74 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ubc14\ub85c \ud559\uc2b5\ud558\uae30 \ubcf4\ub2e4\ub294 \uc774 \uc804 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \ub2e4\uc2dc \uc608\uce21\uc744 \ud558\uba74 \uc774\ub7f0 Blind Spot\uc744 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ubaa8\ub378\uc640 \uac19\uc740 \uba54\ud0c0 \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc790\ub3d9\uc73c\ub85c \ubcc0\ud658\ud574\uc8fc\ub294 \uac83\uc744 Auto Deploy\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc815\ub9ac\ud558\uc790\uba74 CT\ub97c \uc704\ud574\uc11c\ub294 Auto Retraining\uc758\uacfc Auto Deploy \ub450 \uac00\uc9c0 \uae30\ub2a5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. \ub458\uc740 \uc11c\ub85c\uc758 \ub2e8\uc810\uc744 \ubcf4\uc644\ud574 \uacc4\uc18d\ud574\uc11c \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uc720\uc9c0\ud560 \uc218 \uc788\uac8c \ud569\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654"},"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-2",src:n(2586).Z,width:"1356",height:"862"})),(0,i.kt)("p",null,"2\ub2e8\uacc4\uc758 \uc81c\ubaa9\uc740 CI\uc640 CD\uc758 \uc790\ub3d9\ud654 \uc785\ub2c8\ub2e4. DevOps\uc5d0\uc11c\uc758 CI/CD\uc758 \ub300\uc0c1\uc740 \uc18c\uc2a4 \ucf54\ub4dc\uc785\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 MLOps\ub294 \uc5b4\ub5a4 \uac83\uc774 CI/CD\uc758 \ub300\uc0c1\uc77c\uae4c\uc694?"),(0,i.kt)("p",null,"MLOps\uc758 CI/CD \ub300\uc0c1 \ub610\ud55c \uc18c\uc2a4 \ucf54\ub4dc\uc778 \uac83\uc740 \ub9de\uc9c0\ub9cc \uc870\uae08 \ub354 \uc5c4\ubc00\ud788 \uc815\uc758\ud558\uc790\uba74 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778\uc774\ub77c\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub798\uc11c \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc601\ud5a5\uc774 \uc788\ub294 \ubcc0\ud654\uc5d0 \ub300\ud574\uc11c \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ud559\uc2b5\uc774 \ub418\ub294\uc9c0 (CI), \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\ub294\uc9c0 (CD)\ub97c \ud655\uc778\ud574\uc57c \ud569\ub2c8\ub2e4. \uadf8\ub798\uc11c \ud559\uc2b5\uc744 \ud558\ub294 \ucf54\ub4dc\uc5d0 \uc9c1\uc811\uc801\uc778 \uc218\uc815\uc774 \uc788\ub294 \uacbd\uc6b0\uc5d0\ub294 CI/CD\ub97c \uc9c4\ud589\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucf54\ub4dc \uc678\uc5d0\ub3c4 \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804, \ud30c\uc774\uc36c\uc758 \ubc84\uc804 \ubcc0\uacbd\ub3c4 CI/CD\uc758 \ub300\uc0c1\uc785\ub2c8\ub2e4. \ub9ce\uc740 \uacbd\uc6b0 \uba38\uc2e0 \ub7ec\ub2dd\uc740 \uc624\ud508 \uc18c\uc2a4\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uc624\ud508 \uc18c\uc2a4\ub294 \uadf8 \ud2b9\uc131\uc0c1 \ubc84\uc804\uc774 \ubc14\ub00c\uc5c8\uc744 \ub54c \ud568\uc218\uc758 \ub0b4\ubd80 \ub85c\uc9c1\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4. 
\ubb3c\ub860 \uc5b4\ub290 \uc815\ub3c4 \ubc84\uc804\uc774 \uc62c\ub77c \uac08 \ub54c \uc774\uc640 \uad00\ub828\ub41c \uc54c\ub9bc\uc744 \uc8fc\uc9c0\ub9cc \ud55c \ubc88\uc5d0 \ubc84\uc804\uc774 \ud06c\uac8c \ubc14\ub010\ub2e4\uba74 \uc774\ub7ec\ud55c \ubcc0\ud654\ub97c \ubaa8\ub97c \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\uc5d0\ub3c4 CI/CD\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubaa8\ub378\uc774 \ud559\uc2b5, \ub3d9\uc791\ud558\ub294\uc9c0 \ud655\uc778\uc744 \ud574\uc57c \ud569\ub2c8\ub2e4."))}d.isMDXComponent=!0},5833:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},1242:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},287:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},2586:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},3110:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-2-b10bd2ae7445c3098c9f133131859466.png"},7097:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},9869:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[200],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var l=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);t&&(l=l.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,l)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=l.createContext({}),s=function(e){var t=l.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return l.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return l.createElement(l.Fragment,{},t)}},m=l.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=a(e,["components","mdxType","originalType","parentName"]),u=s(n),m=i,f=u["".concat(p,".").concat(m)]||u[m]||d[m]||r;return n?l.createElement(f,o(o({ref:t},c),{},{components:n})):l.createElement(f,o({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:i,o[1]=a;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>s});var l=n(7462),i=(n(7294),n(3905));const r={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,a={unversionedId:"introduction/levels",id:"introduction/levels",title:"2. 
Levels of MLOps",description:"Levels of MLOps",source:"@site/docs/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/docs/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/levels.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/docs/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/docs/introduction/component"}},p={},s=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4",id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4",level:2},{value:"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654",id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654",id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,i.kt)(u,(0,l.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ubc1c\ud45c\ud55c MLOps\uc758 \ub2e8\uacc4\ub97c \ubcf4\uba70 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\uc740 \ubb34\uc5c7\uc778\uc9c0 \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \ubb34\ub824 2015\ub144\ubd80\ud130 MLOps\uc758 \ud544\uc694\uc131\uc744 \ub9d0\ud588\uc2b5\ub2c8\ub2e4. Hidden Technical Debt in Machine Learning Systems \uc740 \uadf8\ub7f0 \uad6c\uae00\uc758 \uc0dd\uac01\uc744 \ub2f4\uc740 \ub17c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper",src:n(7097).Z,width:"840",height:"638"})),(0,i.kt)("p",null,"\uc774 \ub17c\ubb38\uc758 \ud575\uc2ec\uc740 \ubc14\ub85c \uba38\uc2e0\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc81c\ud488\uc744 \ub9cc\ub4dc\ub294\ub370 \uc788\uc5b4\uc11c \uba38\uc2e0\ub7ec\ub2dd \ucf54\ub4dc\ub294 \uc804\uccb4 \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc544\uc8fc \uc77c\ubd80\uc77c \ubfd0\uc774\ub77c\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper-2",src:n(3110).Z,width:"1186",height:"422"})),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \uc774 \ub17c\ubb38\uc744 \ub354 \ubc1c\uc804\uc2dc\ucf1c\uc11c MLOps\ub77c\ub294 \uc6a9\uc5b4\ub97c \ub9cc\ub4e4\uc5b4 \ud655\uc7a5\uc2dc\ucf30\uc2b5\ub2c8\ub2e4. \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,i.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"\uad6c\uae00 \ud074\ub77c\uc6b0\ub4dc \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774\ubc88 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ub9d0\ud558\ub294 MLOps\ub780 \uc5b4\ub5a4 \uac83\uc778\uc9c0\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud574\ubcf4\uace0\uc790 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uad6c\uae00\uc5d0\uc11c\ub294 MLOps\uc758 \ubc1c\uc804 \ub2e8\uacc4\ub97c \ucd1d 3(0~2)\ub2e8\uacc4\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4. \uac01 \ub2e8\uacc4\ub4e4\uc5d0 \ub300\ud574 \uc124\uba85\ud558\uae30 \uc55e\uc11c \uc774\uc804 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c \uc124\uba85\ud588\ub358 \uac1c\ub150 \uc911 \ud544\uc694\ud55c \ubd80\ubd84\uc744 \ub2e4\uc2dc \ud55c\ubc88 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc6b4\uc601\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uacfc \ubc30\ud3ec \ubc0f \uc6b4\uc601\uc744 \ub2f4\ub2f9\ud558\ub294 \uc6b4\uc601\ud300\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub450 \ud300\uc758 \uc6d0\ud560\ud55c \ud611\uc5c5\uc744 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud558\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc804\uc5d0\ub294 \uac04\ub2e8\ud788 Continuous Integration(CI)/Continuous Deployment(CD)\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\ub2e4\uace0 \ud558\uc600\ub294\ub370, \uc5b4\ub5bb\uac8c CI/CD\ub97c \ud558\ub294\uc9c0\uc5d0 \ub300\ud574\uc11c \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4"},"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-0",src:n(5833).Z,width:"1332",height:"494"})),(0,i.kt)("p",null,"0\ub2e8\uacc4\uc5d0\uc11c \ub450 \ud300\uc740 \u201c\ubaa8\ub378\u201d\uc744 \ud1b5\ud574 \uc18c\ud1b5\ud569\ub2c8\ub2e4. \uba38\uc2e0 \ub7ec\ub2dd\ud300\uc740 \uc313\uc5ec\uc788\ub294 \ub370\uc774\ud130\ub85c \ubaa8\ub378\uc744 \ud559\uc2b5\uc2dc\ud0a4\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc6b4\uc601\ud300\uc5d0\uac8c \uc804\ub2ec \ud569\ub2c8\ub2e4. \uc6b4\uc601\ud300\uc740 \uc774\ub807\uac8c \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"toon",src:n(9869).Z,width:"1282",height:"1746"})),(0,i.kt)("p",null,"\ucd08\uae30\uc758 \uba38\uc2e0 \ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc774 \u201c\ubaa8\ub378\u201d \uc911\uc2ec\uc758 \uc18c\ud1b5\uc744 \ud1b5\ud574 \ubc30\ud3ec\ud569\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uc774\ub7f0 \ubc30\ud3ec \ubc29\uc2dd\uc740 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.7\uc744 \uc4f0\uace0 \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.8\uc744 \uc4f4\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0c1\ud669\uc744 \uc790\uc8fc \ubaa9\uaca9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \uc0c1\ud669\uc774 \uc77c\uc5b4\ub098\ub294 \uc774\uc720\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud2b9\uc131\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. 
\ud559\uc2b5\ub41c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uae30 \uc704\ud574\uc11c\ub294 3\uac00\uc9c0\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"\ud30c\uc774\uc36c \ucf54\ub4dc"),(0,i.kt)("li",{parentName:"ol"},"\ud559\uc2b5\ub41c \uac00\uc911\uce58"),(0,i.kt)("li",{parentName:"ol"},"\ud658\uacbd (\ud328\ud0a4\uc9c0, \ubc84\uc804 \ub4f1)")),(0,i.kt)("p",null,"\ub9cc\uc57d \uc774 3\uac00\uc9c0 \uc911 \ud55c \uac00\uc9c0\ub77c\ub3c4 \uc804\ub2ec\uc774 \uc798\ubabb \ub41c\ub2e4\uba74 \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\uac70\ub098 \uc608\uc0c1\ud558\uc9c0 \ubabb\ud55c \uc608\uce21\uc744 \ud560\uc218 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9ce\uc740 \uacbd\uc6b0 \ud658\uacbd\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc544\uc11c \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc740 \ub2e4\uc591\ud55c \uc624\ud508\uc18c\uc2a4\ub97c \uc0ac\uc6a9\ud558\ub294\ub370 \uc624\ud508\uc18c\uc2a4\ub294 \ud2b9\uc131\uc0c1 \uc5b4\ub5a4 \ubc84\uc804\uc744 \uc4f0\ub294\uc9c0\uc5d0 \ub530\ub77c\uc11c \uac19\uc740 \ud568\uc218\ub77c\ub3c4 \uacb0\uacfc\uac00 \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \ubb38\uc81c\ub294 \uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uad00\ub9ac\ud560 \ubaa8\ub378\uc774 \ub9ce\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \uae08\ubc29 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\ub4e4\uc774 \ub9ce\uc544\uc9c0\uace0 \uc11c\ub85c \uc18c\ud1b5\uc5d0 \uc5b4\ub824\uc6c0\uc744 \uacaa\uac8c \ub41c\ub2e4\uba74 \uc131\ub2a5\uc774 \ub354 \uc88b\uc740 \ubaa8\ub378\uc744 \ube60\ub974\uac8c \ubc30\ud3ec\ud560 \uc218 \uc5c6\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654"},"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654"),(0,i.kt)("h3",{id:"pipeline"},"Pipeline"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-pipeline",src:n(287).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub798\uc11c MLOps\uc5d0\uc11c\ub294 \u201c\ud30c\uc774\ud504\ub77c\uc778(Pipeline)\u201d\uc744 \uc774\uc6a9\ud574 \uc774\ub7ec\ud55c \ubb38\uc81c\ub97c \ubc29\uc9c0\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4. MLOps\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub3c4\ucee4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc774\uc6a9\ud574 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ubaa8\ub378 \uac1c\ubc1c\uc5d0 \uc0ac\uc6a9\ud55c \uac83\uacfc \ub3d9\uc77c\ud55c \ud658\uacbd\uc73c\ub85c \ub3d9\uc791\ub418\ub294 \uac83\uc744 \ubcf4\uc7a5\ud569\ub2c8\ub2e4. \uc774\ub97c \ud1b5\ud574\uc11c \ud658\uacbd\uc774 \ub2ec\ub77c\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub7f0\ub370 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubc94\uc6a9\uc801\uc778 \uc6a9\uc5b4\ub85c \uc5ec\ub7ec \ub2e4\uc591\ud55c \ud0dc\uc2a4\ud06c\uc5d0\uc11c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc5ed\ud560\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4. 
\uadf8\ub798\uc11c \ud30c\uc774\ud504\ub77c\uc778 \ub300\uc2e0 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778(Training Pipeline)\uc774 \ub354 \uc815\ud655\ud558\ub2e4\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-ct.png",src:n(1242).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 Continuous Training(CT) \uac1c\ub150\uc774 \ucd94\uac00\ub429\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 CT\ub294 \uc65c \ud544\uc694\ud560\uae4c\uc694?"),(0,i.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,i.kt)("p",null,"Real World\uc5d0\uc11c \ub370\uc774\ud130\ub294 Data Shift\ub77c\ub294 \ub370\uc774\ud130\uc758 \ubd84\ud3ec\uac00 \uacc4\uc18d\ud574\uc11c \ubcc0\ud558\ub294 \ud2b9\uc9d5\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub798\uc11c \uacfc\uac70\uc5d0 \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 \uc2dc\uac04\uc774 \uc9c0\ub0a8\uc5d0 \ub530\ub77c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc800\ud558\ub418\ub294 \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uac00\uc7a5 \uac04\ub2e8\ud558\uace0 \ud6a8\uacfc\uc801\uc778 \ud574\uacb0\ucc45\uc740 \ubc14\ub85c \ucd5c\uadfc \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \ubcc0\ud654\ub41c \ub370\uc774\ud130 \ubd84\ud3ec\uc5d0 \ub9de\ucdb0\uc11c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\uba74 \ub2e4\uc2dc \uc900\uc218\ud55c \uc131\ub2a5\uc744 \ub0bc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc81c\uc870\uc5c5\uacfc \uac19\uc774 \ud55c \uacf5\uc7a5\uc5d0\uc11c \uc5ec\ub7ec \ub808\uc2dc\ud53c\ub97c \ucc98\ub9ac\ud558\ub294 \uacbd\uc6b0 \ubb34\uc870\uac74 \uc7ac\ud559\uc2b5\uc744 \ud558\ub294 \uac83\uc774 \uc88b\uc9c0 \uc54a\uc744 \uc218 \ub3c4 \uc788\uc2b5\ub2c8\ub2e4. Blind Spot\uc774 \ub300\ud45c\uc801\uc778 \uc608\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \uc790\ub3d9\ucc28 \uc0dd\uc0b0 \ub77c\uc778\uc5d0\uc11c \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uace0 \uc774\ub97c \uc774\uc6a9\ud574 \uc608\uce21\uc744 \uc9c4\ud589\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. \ub9cc\uc57d \uc804\ud600 \ub2e4\ub978 \ubaa8\ub378 B\uac00 \ub4e4\uc5b4\uc624\uba74 \uc774\uc804\uc5d0 \ubcf4\uc9c0 \ubabb\ud55c \ub370\uc774\ud130 \ud328\ud134\uc774\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\uc81c \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5c8\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378\uc740 \uc608\uce21\uc744 \uc9c4\ud589\ud560 \uac83 \uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d \ub370\uc774\ud130\uac00 \ub2e4\uc2dc \ubaa8\ub378 A\ub85c \ubc14\ub010\ub2e4\uba74 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d Retraining \uaddc\uce59\ub9cc \uc788\ub2e4\uba74 \ub2e4\uc2dc \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\uac8c \ub429\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ucda9\ubd84\ud55c \uc131\ub2a5\uc744 \ubcf4\uc774\uae30 \uc704\ud574\uc11c\ub294 \ucda9\ubd84\ud55c \uc591\uc758 \ub370\uc774\ud130\uac00 \ubaa8\uc5ec\uc57c \ud569\ub2c8\ub2e4. 
Blind Spot\uc774\ub780 \uc774\ub807\uac8c \ub370\uc774\ud130\ub97c \ubaa8\uc73c\uae30 \uc704\ud574\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uad6c\uac04\uc744 \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c Blind Spot\uc744 \ud574\uacb0\ud558\ub294 \ubc29\ubc95\uc740 \uac04\ub2e8\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ubc14\ub85c \ubaa8\ub378 A\uc5d0 \ub300\ud55c \ubaa8\ub378\uc774 \uacfc\uac70\uc5d0 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uace0 \ub9cc\uc57d \uc788\uc5c8\ub2e4\uba74 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ubc14\ub85c \ud559\uc2b5\ud558\uae30 \ubcf4\ub2e4\ub294 \uc774 \uc804 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \ub2e4\uc2dc \uc608\uce21\uc744 \ud558\uba74 \uc774\ub7f0 Blind Spot\uc744 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ubaa8\ub378\uc640 \uac19\uc740 \uba54\ud0c0 \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc790\ub3d9\uc73c\ub85c \ubcc0\ud658\ud574\uc8fc\ub294 \uac83\uc744 Auto Deploy\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc815\ub9ac\ud558\uc790\uba74 CT\ub97c \uc704\ud574\uc11c\ub294 Auto Retraining\uc758\uacfc Auto Deploy \ub450 \uac00\uc9c0 \uae30\ub2a5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. \ub458\uc740 \uc11c\ub85c\uc758 \ub2e8\uc810\uc744 \ubcf4\uc644\ud574 \uacc4\uc18d\ud574\uc11c \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uc720\uc9c0\ud560 \uc218 \uc788\uac8c \ud569\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654"},"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-2",src:n(2586).Z,width:"1356",height:"862"})),(0,i.kt)("p",null,"2\ub2e8\uacc4\uc758 \uc81c\ubaa9\uc740 CI\uc640 CD\uc758 \uc790\ub3d9\ud654 \uc785\ub2c8\ub2e4. DevOps\uc5d0\uc11c\uc758 CI/CD\uc758 \ub300\uc0c1\uc740 \uc18c\uc2a4 \ucf54\ub4dc\uc785\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 MLOps\ub294 \uc5b4\ub5a4 \uac83\uc774 CI/CD\uc758 \ub300\uc0c1\uc77c\uae4c\uc694?"),(0,i.kt)("p",null,"MLOps\uc758 CI/CD \ub300\uc0c1 \ub610\ud55c \uc18c\uc2a4 \ucf54\ub4dc\uc778 \uac83\uc740 \ub9de\uc9c0\ub9cc \uc870\uae08 \ub354 \uc5c4\ubc00\ud788 \uc815\uc758\ud558\uc790\uba74 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778\uc774\ub77c\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub798\uc11c \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc601\ud5a5\uc774 \uc788\ub294 \ubcc0\ud654\uc5d0 \ub300\ud574\uc11c \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ud559\uc2b5\uc774 \ub418\ub294\uc9c0 (CI), \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\ub294\uc9c0 (CD)\ub97c \ud655\uc778\ud574\uc57c \ud569\ub2c8\ub2e4. \uadf8\ub798\uc11c \ud559\uc2b5\uc744 \ud558\ub294 \ucf54\ub4dc\uc5d0 \uc9c1\uc811\uc801\uc778 \uc218\uc815\uc774 \uc788\ub294 \uacbd\uc6b0\uc5d0\ub294 CI/CD\ub97c \uc9c4\ud589\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucf54\ub4dc \uc678\uc5d0\ub3c4 \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804, \ud30c\uc774\uc36c\uc758 \ubc84\uc804 \ubcc0\uacbd\ub3c4 CI/CD\uc758 \ub300\uc0c1\uc785\ub2c8\ub2e4. \ub9ce\uc740 \uacbd\uc6b0 \uba38\uc2e0 \ub7ec\ub2dd\uc740 \uc624\ud508 \uc18c\uc2a4\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc624\ud508 \uc18c\uc2a4\ub294 \uadf8 \ud2b9\uc131\uc0c1 \ubc84\uc804\uc774 \ubc14\ub00c\uc5c8\uc744 \ub54c \ud568\uc218\uc758 \ub0b4\ubd80 \ub85c\uc9c1\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4. \ubb3c\ub860 \uc5b4\ub290 \uc815\ub3c4 \ubc84\uc804\uc774 \uc62c\ub77c \uac08 \ub54c \uc774\uc640 \uad00\ub828\ub41c \uc54c\ub9bc\uc744 \uc8fc\uc9c0\ub9cc \ud55c \ubc88\uc5d0 \ubc84\uc804\uc774 \ud06c\uac8c \ubc14\ub010\ub2e4\uba74 \uc774\ub7ec\ud55c \ubcc0\ud654\ub97c \ubaa8\ub97c \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\uc5d0\ub3c4 CI/CD\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubaa8\ub378\uc774 \ud559\uc2b5, \ub3d9\uc791\ud558\ub294\uc9c0 \ud655\uc778\uc744 \ud574\uc57c \ud569\ub2c8\ub2e4."))}d.isMDXComponent=!0},5833:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},1242:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},287:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},2586:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},3110:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-2-b10bd2ae7445c3098c9f133131859466.png"},7097:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},9869:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file diff --git a/assets/js/2bbcffe4.f78a9013.js b/assets/js/2bbcffe4.1f0fc702.js similarity index 99% rename from assets/js/2bbcffe4.f78a9013.js rename to assets/js/2bbcffe4.1f0fc702.js index f5117e47..ce418cb2 100644 --- a/assets/js/2bbcffe4.f78a9013.js +++ b/assets/js/2bbcffe4.1f0fc702.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9558],{3905:(e,t,l)=>{l.d(t,{Zo:()=>m,kt:()=>k});var n=l(7294);function r(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function a(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(r[l]=e[l]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(r[l]=e[l])}return r}var p=n.createContext({}),i=function(e){var t=n.useContext(p),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},m=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var l=e.components,r=e.mdxType,a=e.originalType,p=e.parentName,m=s(e,["components","mdxType","originalType","parentName"]),c=i(l),f=r,k=c["".concat(p,".").concat(f)]||c[f]||u[f]||a;return l?n.createElement(k,o(o({ref:t},m),{},{components:l})):n.createElement(k,o({ref:t},m))}));function k(e,t){var l=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=l.length,o=new Array(a);o[0]=f;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof 
e?e:r,o[1]=s;for(var i=2;i{l.r(t),l.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>i});var n=l(7462),r=(l(7294),l(3905));const a={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"version-1.0/setup-components/install-components-mlflow",title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/docs/1.0/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/docs/1.0/setup-components/install-components-kf"},next:{title:"3. Seldon-Core",permalink:"/docs/1.0/setup-components/install-components-seldon"}},p={},i=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"PostgreSQL DB \uc124\uce58",id:"postgresql-db-\uc124\uce58",level:3},{value:"Minio \uc124\uc815",id:"minio-\uc124\uc815",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3}],m={toc:i},c="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,r.kt)("p",null,"MLflow\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4 ML \uc2e4\ud5d8 \uad00\ub9ac \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
MLflow\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"\uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4")," \uc678\uc5d0\ub3c4 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"ML Model \ud328\ud0a4\uc9d5"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"ML \ubaa8\ub378 \ubc30\ud3ec \uad00\ub9ac"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"ML \ubaa8\ub378 \uc800\uc7a5"),"\uacfc \uac19\uc740 \uae30\ub2a5\ub3c4 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 MLflow\ub97c \uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c MLflow\uc5d0\uc11c \uad00\ub9ac\ud558\ub294 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\uace0 UI\ub97c \uc81c\uacf5\ud558\ub294 MLflow Tracking Server\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"postgresql-db-\uc124\uce58"},"PostgreSQL DB \uc124\uce58"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Backend Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 PostgreSQL DB\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uba3c\uc800 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"\uc774\ub77c\ub294 namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,r.kt)("p",null,"postgresql DB\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace \uc5d0 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1\uac1c\uc758 postgresql \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c 
\uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,r.kt)("h3",{id:"minio-\uc124\uc815"},"Minio \uc124\uc815"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Artifacts Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 Minio\ub294 \uc774\uc804 Kubeflow \uc124\uce58 \ub2e8\uacc4\uc5d0\uc11c \uc124\uce58\ud55c Minio\ub97c \ud65c\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e8, kubeflow \uc6a9\ub3c4\uc640 mlflow \uc6a9\ub3c4\ub97c \ubd84\ub9ac\ud558\uae30 \uc704\ud574, mlflow \uc804\uc6a9 \ubc84\ud0b7(bucket)\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","minio \uc5d0 \uc811\uc18d\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574, \uc6b0\uc120 minio-service \ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"minio-install",src:l(5580).Z,width:"2906",height:"1946"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \ub85c\uadf8\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Username: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio")),(0,r.kt)("li",{parentName:"ul"},"Password: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,r.kt)("p",null,"\uc6b0\uce21 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},(0,r.kt)("inlineCode",{parentName:"strong"},"+"))," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Create Bucket"),"\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create-bucket",src:l(5161).Z,width:"2902",height:"1950"})),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Bucket Name"),"\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub97c \uc785\ub825\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc67c\ucabd\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub77c\ub294 \uc774\ub984\uc758 \ubc84\ud0b7\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-bucket",src:l(8757).Z,width:"2902",height:"1950"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository 
\uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"mlflow-server Helm Chart 0.2.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\uc8fc\uc758"),": \uc704\uc758 helm chart\ub294 MLflow \uc758 backend store \uc640 artifacts store \uc758 \uc811\uc18d \uc815\ubcf4\ub97c kubeflow \uc124\uce58 \uacfc\uc815\uc5d0\uc11c \uc0dd\uc131\ud55c minio\uc640 \uc704\uc758 ",(0,r.kt)("a",{parentName:"li",href:"#postgresql-db-%EC%84%A4%EC%B9%98"},"PostgreSQL DB \uc124\uce58"),"\uc5d0\uc11c \uc0dd\uc131\ud55c postgresql \uc815\ubcf4\ub97c default\ub85c \ud558\uc5ec \uc124\uce58\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ubcc4\uac1c\ub85c \uc0dd\uc131\ud55c DB \ud639\uc740 Object storage\ub97c \ud65c\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0, ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo"),"\ub97c \ucc38\uace0\ud558\uc5ec helm install \uc2dc value\ub97c \ub530\ub85c \uc124\uc815\ud558\uc5ec \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1 \uac1c\uc758 mlflow-server \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c MLflow Server\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 
\uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-install",src:l(6905).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},5161:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},5580:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},8757:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},6905:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9558],{3905:(e,t,l)=>{l.d(t,{Zo:()=>m,kt:()=>k});var n=l(7294);function r(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function a(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(r[l]=e[l]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(r[l]=e[l])}return r}var p=n.createContext({}),i=function(e){var t=n.useContext(p),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},m=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var l=e.components,r=e.mdxType,a=e.originalType,p=e.parentName,m=s(e,["components","mdxType","originalType","parentName"]),c=i(l),f=r,k=c["".concat(p,".").concat(f)]||c[f]||u[f]||a;return l?n.createElement(k,o(o({ref:t},m),{},{components:l})):n.createElement(k,o({ref:t},m))}));function k(e,t){var l=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=l.length,o=new Array(a);o[0]=f;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[c]="string"==typeof e?e:r,o[1]=s;for(var i=2;i{l.r(t),l.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>i});var n=l(7462),r=(l(7294),l(3905));const a={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"version-1.0/setup-components/install-components-mlflow",title:"2. 
MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/docs/1.0/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/docs/1.0/setup-components/install-components-kf"},next:{title:"3. Seldon-Core",permalink:"/docs/1.0/setup-components/install-components-seldon"}},p={},i=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"PostgreSQL DB \uc124\uce58",id:"postgresql-db-\uc124\uce58",level:3},{value:"Minio \uc124\uc815",id:"minio-\uc124\uc815",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3}],m={toc:i},c="wrapper";function u(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,r.kt)("p",null,"MLflow\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4 ML \uc2e4\ud5d8 \uad00\ub9ac \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
MLflow\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"\uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4")," \uc678\uc5d0\ub3c4 ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"ML Model \ud328\ud0a4\uc9d5"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"ML \ubaa8\ub378 \ubc30\ud3ec \uad00\ub9ac"),", ",(0,r.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"ML \ubaa8\ub378 \uc800\uc7a5"),"\uacfc \uac19\uc740 \uae30\ub2a5\ub3c4 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 MLflow\ub97c \uc2e4\ud5d8 \uad00\ub9ac \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c MLflow\uc5d0\uc11c \uad00\ub9ac\ud558\ub294 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\uace0 UI\ub97c \uc81c\uacf5\ud558\ub294 MLflow Tracking Server\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"postgresql-db-\uc124\uce58"},"PostgreSQL DB \uc124\uce58"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Backend Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 PostgreSQL DB\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uba3c\uc800 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"\uc774\ub77c\ub294 namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,r.kt)("p",null,"postgresql DB\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace \uc5d0 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1\uac1c\uc758 postgresql \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c 
\uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,r.kt)("h3",{id:"minio-\uc124\uc815"},"Minio \uc124\uc815"),(0,r.kt)("p",null,"MLflow Tracking Server\uac00 Artifacts Store\ub85c \uc0ac\uc6a9\ud560 \uc6a9\ub3c4\uc758 Minio\ub294 \uc774\uc804 Kubeflow \uc124\uce58 \ub2e8\uacc4\uc5d0\uc11c \uc124\uce58\ud55c Minio\ub97c \ud65c\uc6a9\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e8, kubeflow \uc6a9\ub3c4\uc640 mlflow \uc6a9\ub3c4\ub97c \ubd84\ub9ac\ud558\uae30 \uc704\ud574, mlflow \uc804\uc6a9 \ubc84\ud0b7(bucket)\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","minio \uc5d0 \uc811\uc18d\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574, \uc6b0\uc120 minio-service \ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"minio-install",src:l(5580).Z,width:"2906",height:"1946"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \ub85c\uadf8\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Username: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio")),(0,r.kt)("li",{parentName:"ul"},"Password: ",(0,r.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,r.kt)("p",null,"\uc6b0\uce21 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},(0,r.kt)("inlineCode",{parentName:"strong"},"+"))," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Create Bucket"),"\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create-bucket",src:l(5161).Z,width:"2902",height:"1950"})),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Bucket Name"),"\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub97c \uc785\ub825\ud558\uc5ec \ubc84\ud0b7\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc67c\ucabd\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow"),"\ub77c\ub294 \uc774\ub984\uc758 \ubc84\ud0b7\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-bucket",src:l(8757).Z,width:"2902",height:"1950"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository 
\uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"mlflow-server Helm Chart 0.2.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\uc8fc\uc758"),": \uc704\uc758 helm chart\ub294 MLflow \uc758 backend store \uc640 artifacts store \uc758 \uc811\uc18d \uc815\ubcf4\ub97c kubeflow \uc124\uce58 \uacfc\uc815\uc5d0\uc11c \uc0dd\uc131\ud55c minio\uc640 \uc704\uc758 ",(0,r.kt)("a",{parentName:"li",href:"#postgresql-db-%EC%84%A4%EC%B9%98"},"PostgreSQL DB \uc124\uce58"),"\uc5d0\uc11c \uc0dd\uc131\ud55c postgresql \uc815\ubcf4\ub97c default\ub85c \ud558\uc5ec \uc124\uce58\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ubcc4\uac1c\ub85c \uc0dd\uc131\ud55c DB \ud639\uc740 Object storage\ub97c \ud65c\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0, ",(0,r.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo"),"\ub97c \ucc38\uace0\ud558\uc5ec helm install \uc2dc value\ub97c \ub530\ub85c \uc124\uc815\ud558\uc5ec \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,r.kt)("p",null,"mlflow-system namespace \uc5d0 1 \uac1c\uc758 mlflow-server \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub41c \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c MLflow Server\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 
\uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-install",src:l(6905).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},5161:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},5580:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},8757:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},6905:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file diff --git a/assets/js/2d9584e5.dc51ccf7.js b/assets/js/2d9584e5.88941c05.js similarity index 99% rename from assets/js/2d9584e5.dc51ccf7.js rename to assets/js/2d9584e5.88941c05.js index bec6a8eb..aa60e171 100644 --- a/assets/js/2d9584e5.dc51ccf7.js +++ b/assets/js/2d9584e5.88941c05.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3870],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var l=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);t&&(l=l.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,l)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=l.createContext({}),s=function(e){var t=l.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return l.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return l.createElement(l.Fragment,{},t)}},m=l.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=a(e,["components","mdxType","originalType","parentName"]),u=s(n),m=i,f=u["".concat(p,".").concat(m)]||u[m]||d[m]||r;return n?l.createElement(f,o(o({ref:t},c),{},{components:n})):l.createElement(f,o({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:i,o[1]=a;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>s});var l=n(7462),i=(n(7294),n(3905));const r={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,a={unversionedId:"introduction/levels",id:"version-1.0/introduction/levels",title:"2. 
Levels of MLOps",description:"Levels of MLOps",source:"@site/versioned_docs/version-1.0/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/docs/1.0/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/levels.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/docs/1.0/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/docs/1.0/introduction/component"}},p={},s=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4",id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4",level:2},{value:"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654",id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654",id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,i.kt)(u,(0,l.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ubc1c\ud45c\ud55c MLOps\uc758 \ub2e8\uacc4\ub97c \ubcf4\uba70 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\uc740 \ubb34\uc5c7\uc778\uc9c0 \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \ubb34\ub824 2015\ub144\ubd80\ud130 MLOps\uc758 \ud544\uc694\uc131\uc744 \ub9d0\ud588\uc2b5\ub2c8\ub2e4. Hidden Technical Debt in Machine Learning Systems \uc740 \uadf8\ub7f0 \uad6c\uae00\uc758 \uc0dd\uac01\uc744 \ub2f4\uc740 \ub17c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper",src:n(765).Z,width:"840",height:"638"})),(0,i.kt)("p",null,"\uc774 \ub17c\ubb38\uc758 \ud575\uc2ec\uc740 \ubc14\ub85c \uba38\uc2e0\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc81c\ud488\uc744 \ub9cc\ub4dc\ub294\ub370 \uc788\uc5b4\uc11c \uba38\uc2e0\ub7ec\ub2dd \ucf54\ub4dc\ub294 \uc804\uccb4 \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc544\uc8fc \uc77c\ubd80\uc77c \ubfd0\uc774\ub77c\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper-2",src:n(2422).Z,width:"1186",height:"422"})),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \uc774 \ub17c\ubb38\uc744 \ub354 \ubc1c\uc804\uc2dc\ucf1c\uc11c MLOps\ub77c\ub294 \uc6a9\uc5b4\ub97c \ub9cc\ub4e4\uc5b4 \ud655\uc7a5\uc2dc\ucf30\uc2b5\ub2c8\ub2e4. 
\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,i.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"\uad6c\uae00 \ud074\ub77c\uc6b0\ub4dc \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ubc88 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ub9d0\ud558\ub294 MLOps\ub780 \uc5b4\ub5a4 \uac83\uc778\uc9c0\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud574\ubcf4\uace0\uc790 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uad6c\uae00\uc5d0\uc11c\ub294 MLOps\uc758 \ubc1c\uc804 \ub2e8\uacc4\ub97c \ucd1d 3(0~2)\ub2e8\uacc4\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4. \uac01 \ub2e8\uacc4\ub4e4\uc5d0 \ub300\ud574 \uc124\uba85\ud558\uae30 \uc55e\uc11c \uc774\uc804 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c \uc124\uba85\ud588\ub358 \uac1c\ub150 \uc911 \ud544\uc694\ud55c \ubd80\ubd84\uc744 \ub2e4\uc2dc \ud55c\ubc88 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc6b4\uc601\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uacfc \ubc30\ud3ec \ubc0f \uc6b4\uc601\uc744 \ub2f4\ub2f9\ud558\ub294 \uc6b4\uc601\ud300\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub450 \ud300\uc758 \uc6d0\ud560\ud55c \ud611\uc5c5\uc744 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud558\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc804\uc5d0\ub294 \uac04\ub2e8\ud788 Continuous Integration(CI)/Continuous Deployment(CD)\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\ub2e4\uace0 \ud558\uc600\ub294\ub370, \uc5b4\ub5bb\uac8c CI/CD\ub97c \ud558\ub294\uc9c0\uc5d0 \ub300\ud574\uc11c \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4"},"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-0",src:n(5450).Z,width:"1332",height:"494"})),(0,i.kt)("p",null,"0\ub2e8\uacc4\uc5d0\uc11c \ub450 \ud300\uc740 \u201c\ubaa8\ub378\u201d\uc744 \ud1b5\ud574 \uc18c\ud1b5\ud569\ub2c8\ub2e4. \uba38\uc2e0 \ub7ec\ub2dd\ud300\uc740 \uc313\uc5ec\uc788\ub294 \ub370\uc774\ud130\ub85c \ubaa8\ub378\uc744 \ud559\uc2b5\uc2dc\ud0a4\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc6b4\uc601\ud300\uc5d0\uac8c \uc804\ub2ec \ud569\ub2c8\ub2e4. \uc6b4\uc601\ud300\uc740 \uc774\ub807\uac8c \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"toon",src:n(2412).Z,width:"1282",height:"1746"})),(0,i.kt)("p",null,"\ucd08\uae30\uc758 \uba38\uc2e0 \ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc774 \u201c\ubaa8\ub378\u201d \uc911\uc2ec\uc758 \uc18c\ud1b5\uc744 \ud1b5\ud574 \ubc30\ud3ec\ud569\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uc774\ub7f0 \ubc30\ud3ec \ubc29\uc2dd\uc740 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.7\uc744 \uc4f0\uace0 \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.8\uc744 \uc4f4\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0c1\ud669\uc744 \uc790\uc8fc \ubaa9\uaca9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \uc0c1\ud669\uc774 \uc77c\uc5b4\ub098\ub294 \uc774\uc720\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud2b9\uc131\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. 
\ud559\uc2b5\ub41c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uae30 \uc704\ud574\uc11c\ub294 3\uac00\uc9c0\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"\ud30c\uc774\uc36c \ucf54\ub4dc"),(0,i.kt)("li",{parentName:"ol"},"\ud559\uc2b5\ub41c \uac00\uc911\uce58"),(0,i.kt)("li",{parentName:"ol"},"\ud658\uacbd (\ud328\ud0a4\uc9c0, \ubc84\uc804 \ub4f1)")),(0,i.kt)("p",null,"\ub9cc\uc57d \uc774 3\uac00\uc9c0 \uc911 \ud55c \uac00\uc9c0\ub77c\ub3c4 \uc804\ub2ec\uc774 \uc798\ubabb \ub41c\ub2e4\uba74 \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\uac70\ub098 \uc608\uc0c1\ud558\uc9c0 \ubabb\ud55c \uc608\uce21\uc744 \ud560\uc218 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9ce\uc740 \uacbd\uc6b0 \ud658\uacbd\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc544\uc11c \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc740 \ub2e4\uc591\ud55c \uc624\ud508\uc18c\uc2a4\ub97c \uc0ac\uc6a9\ud558\ub294\ub370 \uc624\ud508\uc18c\uc2a4\ub294 \ud2b9\uc131\uc0c1 \uc5b4\ub5a4 \ubc84\uc804\uc744 \uc4f0\ub294\uc9c0\uc5d0 \ub530\ub77c\uc11c \uac19\uc740 \ud568\uc218\ub77c\ub3c4 \uacb0\uacfc\uac00 \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \ubb38\uc81c\ub294 \uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uad00\ub9ac\ud560 \ubaa8\ub378\uc774 \ub9ce\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \uae08\ubc29 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\ub4e4\uc774 \ub9ce\uc544\uc9c0\uace0 \uc11c\ub85c \uc18c\ud1b5\uc5d0 \uc5b4\ub824\uc6c0\uc744 \uacaa\uac8c \ub41c\ub2e4\uba74 \uc131\ub2a5\uc774 \ub354 \uc88b\uc740 \ubaa8\ub378\uc744 \ube60\ub974\uac8c \ubc30\ud3ec\ud560 \uc218 \uc5c6\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654"},"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654"),(0,i.kt)("h3",{id:"pipeline"},"Pipeline"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-pipeline",src:n(1972).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub798\uc11c MLOps\uc5d0\uc11c\ub294 \u201c\ud30c\uc774\ud504\ub77c\uc778(Pipeline)\u201d\uc744 \uc774\uc6a9\ud574 \uc774\ub7ec\ud55c \ubb38\uc81c\ub97c \ubc29\uc9c0\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4. MLOps\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub3c4\ucee4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc774\uc6a9\ud574 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ubaa8\ub378 \uac1c\ubc1c\uc5d0 \uc0ac\uc6a9\ud55c \uac83\uacfc \ub3d9\uc77c\ud55c \ud658\uacbd\uc73c\ub85c \ub3d9\uc791\ub418\ub294 \uac83\uc744 \ubcf4\uc7a5\ud569\ub2c8\ub2e4. \uc774\ub97c \ud1b5\ud574\uc11c \ud658\uacbd\uc774 \ub2ec\ub77c\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub7f0\ub370 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubc94\uc6a9\uc801\uc778 \uc6a9\uc5b4\ub85c \uc5ec\ub7ec \ub2e4\uc591\ud55c \ud0dc\uc2a4\ud06c\uc5d0\uc11c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc5ed\ud560\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4. 
\uadf8\ub798\uc11c \ud30c\uc774\ud504\ub77c\uc778 \ub300\uc2e0 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778(Training Pipeline)\uc774 \ub354 \uc815\ud655\ud558\ub2e4\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-ct.png",src:n(3155).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 Continuous Training(CT) \uac1c\ub150\uc774 \ucd94\uac00\ub429\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 CT\ub294 \uc65c \ud544\uc694\ud560\uae4c\uc694?"),(0,i.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,i.kt)("p",null,"Real World\uc5d0\uc11c \ub370\uc774\ud130\ub294 Data Shift\ub77c\ub294 \ub370\uc774\ud130\uc758 \ubd84\ud3ec\uac00 \uacc4\uc18d\ud574\uc11c \ubcc0\ud558\ub294 \ud2b9\uc9d5\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub798\uc11c \uacfc\uac70\uc5d0 \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 \uc2dc\uac04\uc774 \uc9c0\ub0a8\uc5d0 \ub530\ub77c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc800\ud558\ub418\ub294 \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uac00\uc7a5 \uac04\ub2e8\ud558\uace0 \ud6a8\uacfc\uc801\uc778 \ud574\uacb0\ucc45\uc740 \ubc14\ub85c \ucd5c\uadfc \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \ubcc0\ud654\ub41c \ub370\uc774\ud130 \ubd84\ud3ec\uc5d0 \ub9de\ucdb0\uc11c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\uba74 \ub2e4\uc2dc \uc900\uc218\ud55c \uc131\ub2a5\uc744 \ub0bc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc81c\uc870\uc5c5\uacfc \uac19\uc774 \ud55c \uacf5\uc7a5\uc5d0\uc11c \uc5ec\ub7ec \ub808\uc2dc\ud53c\ub97c \ucc98\ub9ac\ud558\ub294 \uacbd\uc6b0 \ubb34\uc870\uac74 \uc7ac\ud559\uc2b5\uc744 \ud558\ub294 \uac83\uc774 \uc88b\uc9c0 \uc54a\uc744 \uc218 \ub3c4 \uc788\uc2b5\ub2c8\ub2e4. Blind Spot\uc774 \ub300\ud45c\uc801\uc778 \uc608\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \uc790\ub3d9\ucc28 \uc0dd\uc0b0 \ub77c\uc778\uc5d0\uc11c \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uace0 \uc774\ub97c \uc774\uc6a9\ud574 \uc608\uce21\uc744 \uc9c4\ud589\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. \ub9cc\uc57d \uc804\ud600 \ub2e4\ub978 \ubaa8\ub378 B\uac00 \ub4e4\uc5b4\uc624\uba74 \uc774\uc804\uc5d0 \ubcf4\uc9c0 \ubabb\ud55c \ub370\uc774\ud130 \ud328\ud134\uc774\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\uc81c \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5c8\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378\uc740 \uc608\uce21\uc744 \uc9c4\ud589\ud560 \uac83 \uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d \ub370\uc774\ud130\uac00 \ub2e4\uc2dc \ubaa8\ub378 A\ub85c \ubc14\ub010\ub2e4\uba74 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d Retraining \uaddc\uce59\ub9cc \uc788\ub2e4\uba74 \ub2e4\uc2dc \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\uac8c \ub429\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ucda9\ubd84\ud55c \uc131\ub2a5\uc744 \ubcf4\uc774\uae30 \uc704\ud574\uc11c\ub294 \ucda9\ubd84\ud55c \uc591\uc758 \ub370\uc774\ud130\uac00 \ubaa8\uc5ec\uc57c \ud569\ub2c8\ub2e4. 
Blind Spot\uc774\ub780 \uc774\ub807\uac8c \ub370\uc774\ud130\ub97c \ubaa8\uc73c\uae30 \uc704\ud574\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uad6c\uac04\uc744 \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c Blind Spot\uc744 \ud574\uacb0\ud558\ub294 \ubc29\ubc95\uc740 \uac04\ub2e8\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ubc14\ub85c \ubaa8\ub378 A\uc5d0 \ub300\ud55c \ubaa8\ub378\uc774 \uacfc\uac70\uc5d0 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uace0 \ub9cc\uc57d \uc788\uc5c8\ub2e4\uba74 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ubc14\ub85c \ud559\uc2b5\ud558\uae30 \ubcf4\ub2e4\ub294 \uc774 \uc804 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \ub2e4\uc2dc \uc608\uce21\uc744 \ud558\uba74 \uc774\ub7f0 Blind Spot\uc744 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ubaa8\ub378\uc640 \uac19\uc740 \uba54\ud0c0 \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc790\ub3d9\uc73c\ub85c \ubcc0\ud658\ud574\uc8fc\ub294 \uac83\uc744 Auto Deploy\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc815\ub9ac\ud558\uc790\uba74 CT\ub97c \uc704\ud574\uc11c\ub294 Auto Retraining\uc758\uacfc Auto Deploy \ub450 \uac00\uc9c0 \uae30\ub2a5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. \ub458\uc740 \uc11c\ub85c\uc758 \ub2e8\uc810\uc744 \ubcf4\uc644\ud574 \uacc4\uc18d\ud574\uc11c \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uc720\uc9c0\ud560 \uc218 \uc788\uac8c \ud569\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654"},"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-2",src:n(6730).Z,width:"1356",height:"862"})),(0,i.kt)("p",null,"2\ub2e8\uacc4\uc758 \uc81c\ubaa9\uc740 CI\uc640 CD\uc758 \uc790\ub3d9\ud654 \uc785\ub2c8\ub2e4. DevOps\uc5d0\uc11c\uc758 CI/CD\uc758 \ub300\uc0c1\uc740 \uc18c\uc2a4 \ucf54\ub4dc\uc785\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 MLOps\ub294 \uc5b4\ub5a4 \uac83\uc774 CI/CD\uc758 \ub300\uc0c1\uc77c\uae4c\uc694?"),(0,i.kt)("p",null,"MLOps\uc758 CI/CD \ub300\uc0c1 \ub610\ud55c \uc18c\uc2a4 \ucf54\ub4dc\uc778 \uac83\uc740 \ub9de\uc9c0\ub9cc \uc870\uae08 \ub354 \uc5c4\ubc00\ud788 \uc815\uc758\ud558\uc790\uba74 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778\uc774\ub77c\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub798\uc11c \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc601\ud5a5\uc774 \uc788\ub294 \ubcc0\ud654\uc5d0 \ub300\ud574\uc11c \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ud559\uc2b5\uc774 \ub418\ub294\uc9c0 (CI), \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\ub294\uc9c0 (CD)\ub97c \ud655\uc778\ud574\uc57c \ud569\ub2c8\ub2e4. \uadf8\ub798\uc11c \ud559\uc2b5\uc744 \ud558\ub294 \ucf54\ub4dc\uc5d0 \uc9c1\uc811\uc801\uc778 \uc218\uc815\uc774 \uc788\ub294 \uacbd\uc6b0\uc5d0\ub294 CI/CD\ub97c \uc9c4\ud589\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucf54\ub4dc \uc678\uc5d0\ub3c4 \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804, \ud30c\uc774\uc36c\uc758 \ubc84\uc804 \ubcc0\uacbd\ub3c4 CI/CD\uc758 \ub300\uc0c1\uc785\ub2c8\ub2e4. \ub9ce\uc740 \uacbd\uc6b0 \uba38\uc2e0 \ub7ec\ub2dd\uc740 \uc624\ud508 \uc18c\uc2a4\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc624\ud508 \uc18c\uc2a4\ub294 \uadf8 \ud2b9\uc131\uc0c1 \ubc84\uc804\uc774 \ubc14\ub00c\uc5c8\uc744 \ub54c \ud568\uc218\uc758 \ub0b4\ubd80 \ub85c\uc9c1\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4. \ubb3c\ub860 \uc5b4\ub290 \uc815\ub3c4 \ubc84\uc804\uc774 \uc62c\ub77c \uac08 \ub54c \uc774\uc640 \uad00\ub828\ub41c \uc54c\ub9bc\uc744 \uc8fc\uc9c0\ub9cc \ud55c \ubc88\uc5d0 \ubc84\uc804\uc774 \ud06c\uac8c \ubc14\ub010\ub2e4\uba74 \uc774\ub7ec\ud55c \ubcc0\ud654\ub97c \ubaa8\ub97c \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\uc5d0\ub3c4 CI/CD\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubaa8\ub378\uc774 \ud559\uc2b5, \ub3d9\uc791\ud558\ub294\uc9c0 \ud655\uc778\uc744 \ud574\uc57c \ud569\ub2c8\ub2e4."))}d.isMDXComponent=!0},5450:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},3155:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},1972:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},6730:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},2422:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-2-b10bd2ae7445c3098c9f133131859466.png"},765:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},2412:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3870],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var l=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);t&&(l=l.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,l)}return n}function o(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=l.createContext({}),s=function(e){var t=l.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return l.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return l.createElement(l.Fragment,{},t)}},m=l.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,p=e.parentName,c=a(e,["components","mdxType","originalType","parentName"]),u=s(n),m=i,f=u["".concat(p,".").concat(m)]||u[m]||d[m]||r;return n?l.createElement(f,o(o({ref:t},c),{},{components:n})):l.createElement(f,o({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,o=new Array(r);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:i,o[1]=a;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>s});var l=n(7462),i=(n(7294),n(3905));const r={title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},o=void 0,a={unversionedId:"introduction/levels",id:"version-1.0/introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/versioned_docs/version-1.0/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/docs/1.0/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/levels.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/docs/1.0/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/docs/1.0/introduction/component"}},p={},s=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4",id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4",level:2},{value:"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654",id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654",id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,i.kt)(u,(0,l.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ubc1c\ud45c\ud55c MLOps\uc758 \ub2e8\uacc4\ub97c \ubcf4\uba70 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\uc740 \ubb34\uc5c7\uc778\uc9c0 \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \ubb34\ub824 2015\ub144\ubd80\ud130 MLOps\uc758 \ud544\uc694\uc131\uc744 \ub9d0\ud588\uc2b5\ub2c8\ub2e4. Hidden Technical Debt in Machine Learning Systems \uc740 \uadf8\ub7f0 \uad6c\uae00\uc758 \uc0dd\uac01\uc744 \ub2f4\uc740 \ub17c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper",src:n(765).Z,width:"840",height:"638"})),(0,i.kt)("p",null,"\uc774 \ub17c\ubb38\uc758 \ud575\uc2ec\uc740 \ubc14\ub85c \uba38\uc2e0\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc81c\ud488\uc744 \ub9cc\ub4dc\ub294\ub370 \uc788\uc5b4\uc11c \uba38\uc2e0\ub7ec\ub2dd \ucf54\ub4dc\ub294 \uc804\uccb4 \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc544\uc8fc \uc77c\ubd80\uc77c \ubfd0\uc774\ub77c\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"paper-2",src:n(2422).Z,width:"1186",height:"422"})),(0,i.kt)("p",null,"\uad6c\uae00\uc740 \uc774 \ub17c\ubb38\uc744 \ub354 \ubc1c\uc804\uc2dc\ucf1c\uc11c MLOps\ub77c\ub294 \uc6a9\uc5b4\ub97c \ub9cc\ub4e4\uc5b4 \ud655\uc7a5\uc2dc\ucf30\uc2b5\ub2c8\ub2e4. 
\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,i.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"\uad6c\uae00 \ud074\ub77c\uc6b0\ub4dc \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ubc88 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c\ub294 \uad6c\uae00\uc5d0\uc11c \ub9d0\ud558\ub294 MLOps\ub780 \uc5b4\ub5a4 \uac83\uc778\uc9c0\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud574\ubcf4\uace0\uc790 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uad6c\uae00\uc5d0\uc11c\ub294 MLOps\uc758 \ubc1c\uc804 \ub2e8\uacc4\ub97c \ucd1d 3(0~2)\ub2e8\uacc4\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4. \uac01 \ub2e8\uacc4\ub4e4\uc5d0 \ub300\ud574 \uc124\uba85\ud558\uae30 \uc55e\uc11c \uc774\uc804 \ud3ec\uc2a4\ud2b8\uc5d0\uc11c \uc124\uba85\ud588\ub358 \uac1c\ub150 \uc911 \ud544\uc694\ud55c \ubd80\ubd84\uc744 \ub2e4\uc2dc \ud55c\ubc88 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc6b4\uc601\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uacfc \ubc30\ud3ec \ubc0f \uc6b4\uc601\uc744 \ub2f4\ub2f9\ud558\ub294 \uc6b4\uc601\ud300\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub450 \ud300\uc758 \uc6d0\ud560\ud55c \ud611\uc5c5\uc744 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud558\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc804\uc5d0\ub294 \uac04\ub2e8\ud788 Continuous Integration(CI)/Continuous Deployment(CD)\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\ub2e4\uace0 \ud558\uc600\ub294\ub370, \uc5b4\ub5bb\uac8c CI/CD\ub97c \ud558\ub294\uc9c0\uc5d0 \ub300\ud574\uc11c \uc54c\uc544 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"0\ub2e8\uacc4-\uc218\ub3d9-\ud504\ub85c\uc138\uc2a4"},"0\ub2e8\uacc4: \uc218\ub3d9 \ud504\ub85c\uc138\uc2a4"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-0",src:n(5450).Z,width:"1332",height:"494"})),(0,i.kt)("p",null,"0\ub2e8\uacc4\uc5d0\uc11c \ub450 \ud300\uc740 \u201c\ubaa8\ub378\u201d\uc744 \ud1b5\ud574 \uc18c\ud1b5\ud569\ub2c8\ub2e4. \uba38\uc2e0 \ub7ec\ub2dd\ud300\uc740 \uc313\uc5ec\uc788\ub294 \ub370\uc774\ud130\ub85c \ubaa8\ub378\uc744 \ud559\uc2b5\uc2dc\ud0a4\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc6b4\uc601\ud300\uc5d0\uac8c \uc804\ub2ec \ud569\ub2c8\ub2e4. \uc6b4\uc601\ud300\uc740 \uc774\ub807\uac8c \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"toon",src:n(2412).Z,width:"1282",height:"1746"})),(0,i.kt)("p",null,"\ucd08\uae30\uc758 \uba38\uc2e0 \ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc774 \u201c\ubaa8\ub378\u201d \uc911\uc2ec\uc758 \uc18c\ud1b5\uc744 \ud1b5\ud574 \ubc30\ud3ec\ud569\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uc774\ub7f0 \ubc30\ud3ec \ubc29\uc2dd\uc740 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.7\uc744 \uc4f0\uace0 \uc5b4\ub5a4 \uae30\ub2a5\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c 3.8\uc744 \uc4f4\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0c1\ud669\uc744 \uc790\uc8fc \ubaa9\uaca9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \uc0c1\ud669\uc774 \uc77c\uc5b4\ub098\ub294 \uc774\uc720\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud2b9\uc131\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. 
\ud559\uc2b5\ub41c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uae30 \uc704\ud574\uc11c\ub294 3\uac00\uc9c0\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"\ud30c\uc774\uc36c \ucf54\ub4dc"),(0,i.kt)("li",{parentName:"ol"},"\ud559\uc2b5\ub41c \uac00\uc911\uce58"),(0,i.kt)("li",{parentName:"ol"},"\ud658\uacbd (\ud328\ud0a4\uc9c0, \ubc84\uc804 \ub4f1)")),(0,i.kt)("p",null,"\ub9cc\uc57d \uc774 3\uac00\uc9c0 \uc911 \ud55c \uac00\uc9c0\ub77c\ub3c4 \uc804\ub2ec\uc774 \uc798\ubabb \ub41c\ub2e4\uba74 \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\uac70\ub098 \uc608\uc0c1\ud558\uc9c0 \ubabb\ud55c \uc608\uce21\uc744 \ud560\uc218 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9ce\uc740 \uacbd\uc6b0 \ud658\uacbd\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc544\uc11c \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc740 \ub2e4\uc591\ud55c \uc624\ud508\uc18c\uc2a4\ub97c \uc0ac\uc6a9\ud558\ub294\ub370 \uc624\ud508\uc18c\uc2a4\ub294 \ud2b9\uc131\uc0c1 \uc5b4\ub5a4 \ubc84\uc804\uc744 \uc4f0\ub294\uc9c0\uc5d0 \ub530\ub77c\uc11c \uac19\uc740 \ud568\uc218\ub77c\ub3c4 \uacb0\uacfc\uac00 \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c \ubb38\uc81c\ub294 \uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uad00\ub9ac\ud560 \ubaa8\ub378\uc774 \ub9ce\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \uae08\ubc29 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\ub4e4\uc774 \ub9ce\uc544\uc9c0\uace0 \uc11c\ub85c \uc18c\ud1b5\uc5d0 \uc5b4\ub824\uc6c0\uc744 \uacaa\uac8c \ub41c\ub2e4\uba74 \uc131\ub2a5\uc774 \ub354 \uc88b\uc740 \ubaa8\ub378\uc744 \ube60\ub974\uac8c \ubc30\ud3ec\ud560 \uc218 \uc5c6\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"1\ub2e8\uacc4-ml-\ud30c\uc774\ud504\ub77c\uc778-\uc790\ub3d9\ud654"},"1\ub2e8\uacc4: ML \ud30c\uc774\ud504\ub77c\uc778 \uc790\ub3d9\ud654"),(0,i.kt)("h3",{id:"pipeline"},"Pipeline"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-pipeline",src:n(1972).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub798\uc11c MLOps\uc5d0\uc11c\ub294 \u201c\ud30c\uc774\ud504\ub77c\uc778(Pipeline)\u201d\uc744 \uc774\uc6a9\ud574 \uc774\ub7ec\ud55c \ubb38\uc81c\ub97c \ubc29\uc9c0\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4. MLOps\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub3c4\ucee4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc774\uc6a9\ud574 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ubaa8\ub378 \uac1c\ubc1c\uc5d0 \uc0ac\uc6a9\ud55c \uac83\uacfc \ub3d9\uc77c\ud55c \ud658\uacbd\uc73c\ub85c \ub3d9\uc791\ub418\ub294 \uac83\uc744 \ubcf4\uc7a5\ud569\ub2c8\ub2e4. \uc774\ub97c \ud1b5\ud574\uc11c \ud658\uacbd\uc774 \ub2ec\ub77c\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub7f0\ub370 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubc94\uc6a9\uc801\uc778 \uc6a9\uc5b4\ub85c \uc5ec\ub7ec \ub2e4\uc591\ud55c \ud0dc\uc2a4\ud06c\uc5d0\uc11c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc5ed\ud560\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \uc791\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4. 
\uadf8\ub798\uc11c \ud30c\uc774\ud504\ub77c\uc778 \ub300\uc2e0 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778(Training Pipeline)\uc774 \ub354 \uc815\ud655\ud558\ub2e4\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-1-ct.png",src:n(3155).Z,width:"1356",height:"942"})),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 Continuous Training(CT) \uac1c\ub150\uc774 \ucd94\uac00\ub429\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 CT\ub294 \uc65c \ud544\uc694\ud560\uae4c\uc694?"),(0,i.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,i.kt)("p",null,"Real World\uc5d0\uc11c \ub370\uc774\ud130\ub294 Data Shift\ub77c\ub294 \ub370\uc774\ud130\uc758 \ubd84\ud3ec\uac00 \uacc4\uc18d\ud574\uc11c \ubcc0\ud558\ub294 \ud2b9\uc9d5\uc774 \uc788\uc2b5\ub2c8\ub2e4. \uadf8\ub798\uc11c \uacfc\uac70\uc5d0 \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 \uc2dc\uac04\uc774 \uc9c0\ub0a8\uc5d0 \ub530\ub77c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc800\ud558\ub418\ub294 \ubb38\uc81c\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\ub294 \uac00\uc7a5 \uac04\ub2e8\ud558\uace0 \ud6a8\uacfc\uc801\uc778 \ud574\uacb0\ucc45\uc740 \ubc14\ub85c \ucd5c\uadfc \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \ubcc0\ud654\ub41c \ub370\uc774\ud130 \ubd84\ud3ec\uc5d0 \ub9de\ucdb0\uc11c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5\ud558\uba74 \ub2e4\uc2dc \uc900\uc218\ud55c \uc131\ub2a5\uc744 \ub0bc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc81c\uc870\uc5c5\uacfc \uac19\uc774 \ud55c \uacf5\uc7a5\uc5d0\uc11c \uc5ec\ub7ec \ub808\uc2dc\ud53c\ub97c \ucc98\ub9ac\ud558\ub294 \uacbd\uc6b0 \ubb34\uc870\uac74 \uc7ac\ud559\uc2b5\uc744 \ud558\ub294 \uac83\uc774 \uc88b\uc9c0 \uc54a\uc744 \uc218 \ub3c4 \uc788\uc2b5\ub2c8\ub2e4. Blind Spot\uc774 \ub300\ud45c\uc801\uc778 \uc608\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \uc790\ub3d9\ucc28 \uc0dd\uc0b0 \ub77c\uc778\uc5d0\uc11c \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uace0 \uc774\ub97c \uc774\uc6a9\ud574 \uc608\uce21\uc744 \uc9c4\ud589\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. \ub9cc\uc57d \uc804\ud600 \ub2e4\ub978 \ubaa8\ub378 B\uac00 \ub4e4\uc5b4\uc624\uba74 \uc774\uc804\uc5d0 \ubcf4\uc9c0 \ubabb\ud55c \ub370\uc774\ud130 \ud328\ud134\uc774\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\uc81c \ubaa8\ub378 B\uc5d0 \ub300\ud574\uc11c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5c8\uae30 \ub54c\ubb38\uc5d0 \ubaa8\ub378\uc740 \uc608\uce21\uc744 \uc9c4\ud589\ud560 \uac83 \uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d \ub370\uc774\ud130\uac00 \ub2e4\uc2dc \ubaa8\ub378 A\ub85c \ubc14\ub010\ub2e4\uba74 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d Retraining \uaddc\uce59\ub9cc \uc788\ub2e4\uba74 \ub2e4\uc2dc \ubaa8\ub378 A\uc5d0 \ub300\ud574\uc11c \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\uac8c \ub429\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc774 \ucda9\ubd84\ud55c \uc131\ub2a5\uc744 \ubcf4\uc774\uae30 \uc704\ud574\uc11c\ub294 \ucda9\ubd84\ud55c \uc591\uc758 \ub370\uc774\ud130\uac00 \ubaa8\uc5ec\uc57c \ud569\ub2c8\ub2e4. 
Blind Spot\uc774\ub780 \uc774\ub807\uac8c \ub370\uc774\ud130\ub97c \ubaa8\uc73c\uae30 \uc704\ud574\uc11c \ubaa8\ub378\uc774 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294 \uad6c\uac04\uc744 \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ub7ec\ud55c Blind Spot\uc744 \ud574\uacb0\ud558\ub294 \ubc29\ubc95\uc740 \uac04\ub2e8\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ubc14\ub85c \ubaa8\ub378 A\uc5d0 \ub300\ud55c \ubaa8\ub378\uc774 \uacfc\uac70\uc5d0 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uace0 \ub9cc\uc57d \uc788\uc5c8\ub2e4\uba74 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc744 \ubc14\ub85c \ud559\uc2b5\ud558\uae30 \ubcf4\ub2e4\ub294 \uc774 \uc804 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \ub2e4\uc2dc \uc608\uce21\uc744 \ud558\uba74 \uc774\ub7f0 Blind Spot\uc744 \ud574\uacb0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ubaa8\ub378\uc640 \uac19\uc740 \uba54\ud0c0 \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 \ubaa8\ub378\uc744 \uc790\ub3d9\uc73c\ub85c \ubcc0\ud658\ud574\uc8fc\ub294 \uac83\uc744 Auto Deploy\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc815\ub9ac\ud558\uc790\uba74 CT\ub97c \uc704\ud574\uc11c\ub294 Auto Retraining\uc758\uacfc Auto Deploy \ub450 \uac00\uc9c0 \uae30\ub2a5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. \ub458\uc740 \uc11c\ub85c\uc758 \ub2e8\uc810\uc744 \ubcf4\uc644\ud574 \uacc4\uc18d\ud574\uc11c \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uc720\uc9c0\ud560 \uc218 \uc788\uac8c \ud569\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"2\ub2e8\uacc4-cicd-\ud30c\uc774\ud504\ub77c\uc778\uc758-\uc790\ub3d9\ud654"},"2\ub2e8\uacc4: CI/CD \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc790\ub3d9\ud654"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"level-2",src:n(6730).Z,width:"1356",height:"862"})),(0,i.kt)("p",null,"2\ub2e8\uacc4\uc758 \uc81c\ubaa9\uc740 CI\uc640 CD\uc758 \uc790\ub3d9\ud654 \uc785\ub2c8\ub2e4. DevOps\uc5d0\uc11c\uc758 CI/CD\uc758 \ub300\uc0c1\uc740 \uc18c\uc2a4 \ucf54\ub4dc\uc785\ub2c8\ub2e4. \uadf8\ub807\ub2e4\uba74 MLOps\ub294 \uc5b4\ub5a4 \uac83\uc774 CI/CD\uc758 \ub300\uc0c1\uc77c\uae4c\uc694?"),(0,i.kt)("p",null,"MLOps\uc758 CI/CD \ub300\uc0c1 \ub610\ud55c \uc18c\uc2a4 \ucf54\ub4dc\uc778 \uac83\uc740 \ub9de\uc9c0\ub9cc \uc870\uae08 \ub354 \uc5c4\ubc00\ud788 \uc815\uc758\ud558\uc790\uba74 \ud559\uc2b5 \ud30c\uc774\ud504\ub77c\uc778\uc774\ub77c\uace0 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub798\uc11c \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294\ub370 \uc788\uc5b4\uc11c \uc601\ud5a5\uc774 \uc788\ub294 \ubcc0\ud654\uc5d0 \ub300\ud574\uc11c \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ud559\uc2b5\uc774 \ub418\ub294\uc9c0 (CI), \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\ub294\uc9c0 (CD)\ub97c \ud655\uc778\ud574\uc57c \ud569\ub2c8\ub2e4. \uadf8\ub798\uc11c \ud559\uc2b5\uc744 \ud558\ub294 \ucf54\ub4dc\uc5d0 \uc9c1\uc811\uc801\uc778 \uc218\uc815\uc774 \uc788\ub294 \uacbd\uc6b0\uc5d0\ub294 CI/CD\ub97c \uc9c4\ud589\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucf54\ub4dc \uc678\uc5d0\ub3c4 \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804, \ud30c\uc774\uc36c\uc758 \ubc84\uc804 \ubcc0\uacbd\ub3c4 CI/CD\uc758 \ub300\uc0c1\uc785\ub2c8\ub2e4. \ub9ce\uc740 \uacbd\uc6b0 \uba38\uc2e0 \ub7ec\ub2dd\uc740 \uc624\ud508 \uc18c\uc2a4\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc624\ud508 \uc18c\uc2a4\ub294 \uadf8 \ud2b9\uc131\uc0c1 \ubc84\uc804\uc774 \ubc14\ub00c\uc5c8\uc744 \ub54c \ud568\uc218\uc758 \ub0b4\ubd80 \ub85c\uc9c1\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4. \ubb3c\ub860 \uc5b4\ub290 \uc815\ub3c4 \ubc84\uc804\uc774 \uc62c\ub77c \uac08 \ub54c \uc774\uc640 \uad00\ub828\ub41c \uc54c\ub9bc\uc744 \uc8fc\uc9c0\ub9cc \ud55c \ubc88\uc5d0 \ubc84\uc804\uc774 \ud06c\uac8c \ubc14\ub010\ub2e4\uba74 \uc774\ub7ec\ud55c \ubcc0\ud654\ub97c \ubaa8\ub97c \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\uc758 \ubc84\uc804\uc774 \ubcc0\ud558\ub294 \uacbd\uc6b0\uc5d0\ub3c4 CI/CD\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubaa8\ub378\uc774 \ud559\uc2b5, \ub3d9\uc791\ud558\ub294\uc9c0 \ud655\uc778\uc744 \ud574\uc57c \ud569\ub2c8\ub2e4."))}d.isMDXComponent=!0},5450:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},3155:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},1972:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},6730:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},2422:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-2-b10bd2ae7445c3098c9f133131859466.png"},765:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},2412:(e,t,n)=>{n.d(t,{Z:()=>l});const l=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file diff --git a/assets/js/317f9d80.a0e6c39b.js b/assets/js/317f9d80.55c20a28.js similarity index 99% rename from assets/js/317f9d80.a0e6c39b.js rename to assets/js/317f9d80.55c20a28.js index 7ad86df1..0b256d4e 100644 --- a/assets/js/317f9d80.a0e6c39b.js +++ b/assets/js/317f9d80.55c20a28.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4119],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},m=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=p(a),k=r,d=u["".concat(s,".").concat(k)]||u[k]||c[k]||l;return a?n.createElement(d,o(o({ref:t},m),{},{components:a})):n.createElement(d,o({ref:t},m))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof 
e?e:r,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"version-1.0/setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. (Optional) Setup GPU",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],m={toc:p},u="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(u,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub4c8\uc744 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uacfc\uc815\uc5d0 \uad00\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \uc18c\uac1c\ub418\ub294 \uacfc\uc815\uc740 \ubaa8\ub450 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0\uc11c \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"helm"},"Helm"),(0,r.kt)("p",null,"Helm\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud328\ud0a4\uc9c0\uc640 \uad00\ub828\ub41c \uc790\uc6d0\uc744 \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 Helm v3.7.1 \ubc84\uc804\uc744 \ub0b4\ub824\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"\uacf5\uc2dd 
\ud648\ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc2dc\uc5b4, \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc758 OS\uc640 CPU\uc5d0 \ub9de\ub294 \ubc14\uc774\ub108\ub9ac\uc758 \ub2e4\uc6b4 \uacbd\ub85c\ub97c \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"helm\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,r.kt)("p",{parentName:"li"},"Environment variables:"),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,r.kt)("p",{parentName:"li"},"..."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("h2",{id:"kustomize"},"Kustomize"),(0,r.kt)("p",null,"kustomize \ub610\ud55c \uc5ec\ub7ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kustomize v3.10.0 \ubc84\uc804\uc758 \ubc14\uc774\ub108\ub9ac\ub97c \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux 
amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0"),"\uc5d0\uc11c \ud655\uc778 \ud6c4 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kustomize \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,r.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"CSI Plugin\uc740 kubernetes \ub0b4\uc758 \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \ub2f4\ub2f9\ud558\ub294 \ubaa8\ub4c8\uc785\ub2c8\ub2e4. 
\ub2e8\uc77c \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc27d\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 CSI Plugin\uc778 Local Path Provisioner\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub610\ud55c, \ub2e4\uc74c\uacfc \uac19\uc774 local-path-storage namespace \uc5d0 provisioner pod\uc774 Running \uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uc744 \uc218\ud589\ud558\uc5ec default storage class\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"default storage class\ub85c \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc774 NAME\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"local-path (default)")," \uc778 storage class\uac00 \uc874\uc7ac\ud558\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4119],{3905:(e,t,a)=>{a.d(t,{Zo:()=>m,kt:()=>d});var n=a(7294);function r(e,t,a){return t in 
e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function l(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},m=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,l=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=p(a),k=r,d=u["".concat(s,".").concat(k)]||u[k]||c[k]||l;return a?n.createElement(d,o(o({ref:t},m),{},{components:a})):n.createElement(d,o({ref:t},m))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:r,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const l={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"version-1.0/setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. 
(Optional) Setup GPU",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],m={toc:p},u="wrapper";function c(e){let{components:t,...a}=e;return(0,r.kt)(u,(0,n.Z)({},m,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub4c8\uc744 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uacfc\uc815\uc5d0 \uad00\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \uc18c\uac1c\ub418\ub294 \uacfc\uc815\uc740 \ubaa8\ub450 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0\uc11c \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"helm"},"Helm"),(0,r.kt)("p",null,"Helm\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud328\ud0a4\uc9c0\uc640 \uad00\ub828\ub41c \uc790\uc6d0\uc744 \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 Helm v3.7.1 \ubc84\uc804\uc744 \ub0b4\ub824\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"\uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc2dc\uc5b4, \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc758 OS\uc640 CPU\uc5d0 \ub9de\ub294 \ubc14\uc774\ub108\ub9ac\uc758 \ub2e4\uc6b4 \uacbd\ub85c\ub97c \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"helm\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm search: search for 
charts")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,r.kt)("p",{parentName:"li"},"Environment variables:"),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Name"),(0,r.kt)("th",{parentName:"tr",align:null},"Description"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,r.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,r.kt)("p",{parentName:"li"},"..."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("h2",{id:"kustomize"},"Kustomize"),(0,r.kt)("p",null,"kustomize \ub610\ud55c \uc5ec\ub7ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ud55c \ubc88\uc5d0 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ub3c4\uc640\uc8fc\ub294 \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc9d5 \ub3c4\uad6c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kustomize v3.10.0 \ubc84\uc804\uc758 \ubc14\uc774\ub108\ub9ac\ub97c \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"For Linux amd64"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\ub978 OS\ub294 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0"),"\uc5d0\uc11c \ud655\uc778 \ud6c4 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("ol",{start:2},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kustomize \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc555\ucd95\uc744 \ud480\uace0, \ud30c\uc77c\uc758 \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,r.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"CSI Plugin\uc740 kubernetes \ub0b4\uc758 \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \ub2f4\ub2f9\ud558\ub294 \ubaa8\ub4c8\uc785\ub2c8\ub2e4. \ub2e8\uc77c \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc27d\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 CSI Plugin\uc778 Local Path Provisioner\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub610\ud55c, \ub2e4\uc74c\uacfc \uac19\uc774 local-path-storage namespace \uc5d0 provisioner pod\uc774 Running \uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uc744 \uc218\ud589\ud558\uc5ec default storage class\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"default storage class\ub85c \uc124\uc815\ub418\uc5c8\ub294\uc9c0 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc774 NAME\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"local-path (default)")," \uc778 storage class\uac00 \uc874\uc7ac\ud558\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/3546d36b.9935fec7.js b/assets/js/3546d36b.a10369c4.js similarity index 98% rename from assets/js/3546d36b.9935fec7.js rename to assets/js/3546d36b.a10369c4.js index 879ae263..e60d2c67 100644 --- a/assets/js/3546d36b.9935fec7.js +++ b/assets/js/3546d36b.a10369c4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8677],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),s=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),u=s(r),k=a,m=u["".concat(p,".").concat(k)]||u[k]||d[k]||l;return r?n.createElement(m,o(o({ref:t},c),{},{components:r})):n.createElement(m,o({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=k;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[u]="string"==typeof e?e:a,o[1]=i;for(var s=2;s{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>s});var n=r(7462),a=(r(7294),r(3905));const l={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/install",id:"prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/docs/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/docs/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/install.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob 
Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/docs/prerequisites/docker/introduction"}},p={},s=[{value:"Docker",id:"docker",level:2},{value:"\uc124\uce58 \ud655\uc778",id:"\uc124\uce58-\ud655\uc778",level:2},{value:"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c..",id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc2e4\uc2b5\uc744 \uc704\ud574 \ub3c4\ucee4\ub97c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc124\uce58\ub294 \uc5b4\ub5a4 OS\ub97c \uc0ac\uc6a9\ud558\ub294\uc9c0\uc5d0 \ub530\ub77c \ub2ec\ub77c\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uac01 \ud658\uacbd\uc5d0 \ub9de\ub294 \ub3c4\ucee4 \uc124\uce58\ub294 \uacf5\uc2dd \ud648\ud398\uc774\uc9c0\ub97c \ucc38\uace0\ud574\uc8fc\uc138\uc694."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"\uc124\uce58-\ud655\uc778"},"\uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294 OS, \ud130\ubbf8\ub110 \ud658\uacbd\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c"},"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c.."),(0,a.kt)("p",null,"MLOps\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574 \ud544\uc694\ud55c \ub3c4\ucee4 \uc0ac\uc6a9\ubc95\uc744 \uc124\uba85\ud558\ub2c8 \ub9ce\uc740 \ube44\uc720\uc640 \uc608\uc2dc\uac00 MLOps \ucabd\uc73c\ub85c \uce58\uc911\ub418\uc5b4 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8677],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in 
e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),s=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},c=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),u=s(r),k=a,m=u["".concat(p,".").concat(k)]||u[k]||d[k]||l;return r?n.createElement(m,o(o({ref:t},c),{},{components:r})):n.createElement(m,o({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=k;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[u]="string"==typeof e?e:a,o[1]=i;for(var s=2;s{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>s});var n=r(7462),a=(r(7294),r(3905));const l={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/install",id:"prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/docs/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/docs/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/install.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/docs/prerequisites/docker/introduction"}},p={},s=[{value:"Docker",id:"docker",level:2},{value:"\uc124\uce58 \ud655\uc778",id:"\uc124\uce58-\ud655\uc778",level:2},{value:"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c..",id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc2e4\uc2b5\uc744 \uc704\ud574 \ub3c4\ucee4\ub97c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc124\uce58\ub294 \uc5b4\ub5a4 OS\ub97c \uc0ac\uc6a9\ud558\ub294\uc9c0\uc5d0 \ub530\ub77c \ub2ec\ub77c\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uac01 \ud658\uacbd\uc5d0 \ub9de\ub294 \ub3c4\ucee4 \uc124\uce58\ub294 \uacf5\uc2dd \ud648\ud398\uc774\uc9c0\ub97c 
\ucc38\uace0\ud574\uc8fc\uc138\uc694."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"\uc124\uce58-\ud655\uc778"},"\uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294 OS, \ud130\ubbf8\ub110 \ud658\uacbd\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c"},"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c.."),(0,a.kt)("p",null,"MLOps\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574 \ud544\uc694\ud55c \ub3c4\ucee4 \uc0ac\uc6a9\ubc95\uc744 \uc124\uba85\ud558\ub2c8 \ub9ce\uc740 \ube44\uc720\uc640 \uc608\uc2dc\uac00 MLOps \ucabd\uc73c\ub85c \uce58\uc911\ub418\uc5b4 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/36614f1b.9e4f3599.js b/assets/js/36614f1b.2c78bdaf.js similarity index 99% rename from assets/js/36614f1b.9e4f3599.js rename to assets/js/36614f1b.2c78bdaf.js index 8d5a836a..162348d1 100644 --- a/assets/js/36614f1b.9e4f3599.js +++ b/assets/js/36614f1b.2c78bdaf.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5865],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var a=r.createContext({}),s=function(e){var t=r.useContext(a),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=s(e.components);return 
r.createElement(a.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},v=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,a=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),d=s(n),v=i,f=d["".concat(a,".").concat(v)]||d[v]||u[v]||o;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new Array(o);l[0]=v;var p={};for(var a in t)hasOwnProperty.call(t,a)&&(p[a]=t[a]);p.originalType=e,p[d]="string"==typeof e?e:i,l[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>a,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>p,toc:()=>s});var r=n(7462),i=(n(7294),n(3905));const o={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"introduction/intro",id:"version-1.0/introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/versioned_docs/version-1.0/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/docs/1.0/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. Levels of MLOps",permalink:"/docs/1.0/introduction/levels"}},a={},s=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML+Ops",id:"1-mlops",level:3},{value:"Rule Based",id:"rule-based",level:4},{value:"Machine Learning",id:"machine-learning",level:4},{value:"Deep Learning",id:"deep-learning",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) \uacb0\ub860",id:"3-\uacb0\ub860",level:3}],c={toc:s},d="wrapper";function u(e){let{components:t,...o}=e;return(0,i.kt)(d,(0,r.Z)({},c,o,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,i.kt)("p",null,"2012\ub144 Alexnet \uc774\ud6c4 CV, NLP\ub97c \ube44\ub86f\ud558\uc5ec \ub370\uc774\ud130\uac00 \uc874\uc7ac\ud558\ub294 \ub3c4\uba54\uc778\uc774\ub77c\uba74 \uc5b4\ub514\uc11c\ub4e0 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \ub3c4\uc785\ud558\uace0\uc790 \ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc740 AI\ub77c\ub294 \ub2e8\uc5b4\ub85c \ubb36\uc774\uba70 \ubd88\ub838\uace0 \ub9ce\uc740 \ub9e4\uccb4\uc5d0\uc11c AI\uc758 \ud544\uc694\uc131\uc744 \uc678\ucce4\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \ubb34\uc218\ud788 \ub9ce\uc740 \uae30\uc5c5\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc218\ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\ub97c \uc9c4\ud589\ud558\uc600\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uadf8 \uacb0\uacfc\ub294 \uc5b4\ub5bb\uac8c \ub418\uc5c8\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uc5d8\ub9ac\uba3c\ud2b8 AI\uc758 \uc74c\ubcd1\ucc2c \ub3d9\ubd81\uc544 \uc9c0\uc5ed \ucd1d\uad04\ucc45\uc784\uc790\ub294 ",(0,i.kt)("a",{parentName:"p",href:"https://zdnet.co.kr/view/?no=20200611062002"},(0,i.kt)("em",{parentName:"a"},'"10\uac1c \uae30\uc5c5\uc5d0 AI \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud55c\ub2e4\uba74 \uadf8\uc911 9\uac1c\ub294 \ucee8\uc149\uac80\uc99d(POC)\ub9cc \ud558\ub2e4 \ub05d\ub09c\ub2e4"')),"\uace0 \ub9d0\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ucc98\ub7fc \ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uc774 \ubb38\uc81c\ub97c \ud480 \uc218 \uc788\uc744 \uac83 \uac19\ub2e4\ub294 \uac00\ub2a5\uc131\ub9cc\uc744 \ubcf4\uc5ec\uc8fc\uace0 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \uc774 \uc2dc\uae30\ucbe4\uc5d0 ",(0,i.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI\uc5d0 \ub2e4\uc2dc \uaca8\uc6b8"),"\uc774 \ub2e4\uac00\uc624\uace0 \uc788\ub2e4\ub294 \uc804\ub9dd\ub3c4 \ub098\uc624\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc65c \ud504\ub85c\uc81d\ud2b8 \ub300\ubd80\ubd84\uc774 \ucee8\uc149\uac80\uc99d(POC) \ub2e8\uacc4\uc5d0\uc11c \ub05d\ub0ac\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd \ucf54\ub4dc\ub9cc\uc73c\ub85c\ub294 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub97c \uc6b4\uc601\ud560 \uc218 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc2e4\uc81c \uc11c\ube44\uc2a4 \ub2e8\uacc4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc758 \ucf54\ub4dc\uac00 \ucc28\uc9c0\ud558\ub294 \ubd80\ubd84\uc740 \uc0dd\uac01\ubcf4\ub2e4 \ud06c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ub2e8\uc21c\ud788 \ubaa8\ub378\uc758 \uc131\ub2a5\ub9cc\uc774 \uc544\ub2cc \ub2e4\ub978 \ub9ce\uc740 \ubd80\ubd84\uc744 \uace0\ub824\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uad6c\uae00\uc740 \uc774\ub7f0 \ubb38\uc81c\ub97c 2015\ub144 ",(0,i.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),"\uc5d0\uc11c \uc9c0\uc801\ud55c \ubc14 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uc774 \ub17c\ubb38\uc774 \ub098\uc62c \ub2f9\uc2dc\uc5d0\ub294 \uc544\uc9c1 \ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc758 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud558\uae30 \ubc14\uc05c \uc2dc\uae30\uc600\uae30 \ub54c\ubb38\uc5d0, \ub17c\ubb38\uc774 \uc9c0\uc801\ud558\ub294 \ubc14\uc5d0 \ub9ce\uc740 \uc8fc\uc758\ub97c \uae30\uc6b8\uc774\uc9c0\ub294 \uc54a\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 \uba87 \ub144\uc774 \uc9c0\ub09c \ud6c4 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud574\ub0b4\uc5b4, \uc774\uc81c \uc0ac\ub78c\ub4e4\uc740 \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0 \uc801\uc6a9\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uace7 \ub9ce\uc740 \uc0ac\ub78c\uc774 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub294 \uc27d\uc9c0 \uc54a\ub2e4\ub294 \uac83\uc744 
\uae68\ub2ec\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"devops"},"Devops"),(0,i.kt)("p",null,"MLOps\ub294 \uc774\uc804\uc5d0 \uc5c6\ub358 \uc0c8\ub85c\uc6b4 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c DevOps\ub77c\uace0 \ubd88\ub9ac\ub294 \uac1c\ubc1c \ubc29\ubc95\ub860\uc5d0\uc11c \ud30c\uc0dd\ub41c \ub2e8\uc5b4\uc785\ub2c8\ub2e4. \uadf8\ub807\uae30\uc5d0 DevOps\ub97c \uc774\ud574\ud55c\ub2e4\uba74 MLOps\ub97c \uc774\ud574\ud558\ub294 \ub370 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"devops-1"},"DevOps"),(0,i.kt)("p",null,"DevOps\ub294 Development(\uac1c\ubc1c)\uc640 Operations(\uc6b4\uc601)\uc758 \ud569\uc131\uc5b4\ub85c \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc758 \uac1c\ubc1c(Development)\uacfc \uc6b4\uc601(Operations)\uc758 \ud569\uc131\uc5b4\ub85c\uc11c \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c\uc790\uc640 \uc815\ubcf4\uae30\uc220 \uc804\ubb38\uac00 \uac04\uc758 \uc18c\ud1b5, \ud611\uc5c5 \ubc0f \ud1b5\ud569\uc744 \uac15\uc870\ud558\ub294 \uac1c\ubc1c \ud658\uacbd\uc774\ub098 \ubb38\ud654\ub97c \ub9d0\ud569\ub2c8\ub2e4.\nDevOps\uc758 \ubaa9\uc801\uc740 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \uc870\uc9c1\uacfc \uc6b4\uc601 \uc870\uc9c1\uac04\uc758 \uc0c1\ud638 \uc758\uc874\uc801 \ub300\uc751\uc774\uba70 \uc870\uc9c1\uc774 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uacfc \uc11c\ube44\uc2a4\ub97c \ube60\ub978 \uc2dc\uac04\uc5d0 \uac1c\ubc1c \ubc0f \ubc30\ud3ec\ud558\ub294 \uac83\uc744 \ubaa9\uc801\uc73c\ub85c \ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,i.kt)("p",null,"\uadf8\ub7fc \uac04\ub2e8\ud55c \uc0c1\ud669 \uc124\uba85\uc744 \ud1b5\ud574 DevOps\uac00 \uc65c \ud544\uc694\ud55c\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uc9c0\uc6d0\ud558\ub294 \uae30\ub2a5\uc774 \ub9ce\uc9c0 \uc54a\uc73c\uba70 \ud300 \ub610\ub294 \ud68c\uc0ac\uc758 \uaddc\ubaa8\uac00 \uc791\uc2b5\ub2c8\ub2e4. \uc774\ub54c\uc5d0\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uad6c\ubd84\uc774 \uc5c6\uac70\ub098 \uc791\uc740 \uaddc\ubaa8\uc758 \ud300\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \ud575\uc2ec\uc740 \uaddc\ubaa8\uac00 \uc791\ub2e4\ub294 \uac83\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub54c\ub294 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \uc811\uc810\uc774 \ub9ce\uace0, \uc9d1\uc911\ud574\uc57c \ud558\ub294 \uc11c\ube44\uc2a4\uac00 \uc801\uae30 \ub54c\ubb38\uc5d0 \ube60\ub974\uac8c \uc11c\ube44\uc2a4\ub97c \uac1c\uc120\ud574 \ub098\uac08 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \ucee4\uc9c8\uc218\ub85d \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc740 \ubd84\ub9ac\ub418\uace0 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \ucc44\ub110\uc758 \ubb3c\ub9ac\uc801\uc778 \ud55c\uacc4\uac00 \uc624\uac8c \ub429\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\ub978 \ud300\uacfc \ud568\uaed8\ud558\ub294 \ubbf8\ud305\uc5d0 \ud300\uc6d0 \uc804\uccb4\uac00 \ubbf8\ud305\uc744 \ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c \uac01 \ud300\uc758 \ud300\uc7a5 \ud639\uc740 \uc18c\uc218\uc758 \uc2dc\ub2c8\uc5b4\ub9cc \ucc38\uc11d\ud558\uc5ec \ubbf8\ud305\uc744 \uc9c4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7f0 \uc18c\ud1b5 \ucc44\ub110\uc758 \ud55c\uacc4\ub294 \ud544\uc5f0\uc801\uc73c\ub85c \uc18c\ud1b5\uc758 \ubd80\uc7ac\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ub2e4 \ubcf4\uba74 \uac1c\ubc1c\ud300\uc740 \uc0c8\ub85c\uc6b4 \uae30\ub2a5\ub4e4\uc744 \uacc4\uc18d\ud574\uc11c \uac1c\ubc1c\ud558\uace0 \uc6b4\uc601\ud300 \uc785\uc7a5\uc5d0\uc11c\ub294 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \ubc30\ud3ec \uc2dc \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\ub294 \ub4f1 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc704\uc640 \uac19\uc740 \uc0c1\ud669\uc774 \ubc18\ubcf5\ub418\uba74 \uc870\uc9c1 \uc774\uae30\uc8fc\uc758\ub77c\uace0 \ubd88\ub9ac\ub294 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"silo",src:n(745).Z,width:"892",height:"498"})),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"\uc0ac\uc77c\ub85c(silo)\ub294 \uace1\uc2dd\uc774\ub098 \uc0ac\ub8cc\ub97c \uc800\uc7a5\ud558\ub294 \uad74\ub69d \ubaa8\uc591\uc758 \ucc3d\uace0\ub97c \uc758\ubbf8\ud55c\ub2e4. \uc0ac\uc77c\ub85c\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc874\uc7ac\ud558\uba70 \uc800\uc7a5\ub418\ub294 \ubb3c\ud488\uc774 \uc11c\ub85c \uc11e\uc774\uc9c0 \uc54a\ub3c4\ub85d \ucca0\uc800\ud788 \uad00\ub9ac\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc900\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc0ac\uc77c\ub85c \ud6a8\uacfc(Organizational Silos Effect)\ub294 \uc870\uc9c1 \ubd80\uc11c \uac04\uc5d0 \uc11c\ub85c \ud611\ub825\ud558\uc9c0 \uc54a\uace0 \ub0b4\ubd80 \uc774\uc775\ub9cc\uc744 \ucd94\uad6c\ud558\ub294 \ud604\uc0c1\uc744 \uc758\ubbf8\ud55c\ub2e4. \uc870\uc9c1 \ub0b4\uc5d0\uc11c \uac1c\ubcc4 \ubd80\uc11c\ub07c\ub9ac \uc11c\ub85c \ub2f4\uc744 \uc313\uace0 \uac01\uc790\uc758 \uc774\uc775\uc5d0\ub9cc \ubab0\ub450\ud558\ub294 \ubd80\uc11c \uc774\uae30\uc8fc\uc758\ub97c \uc77c\uceeb\ub294\ub2e4.")),(0,i.kt)("p",null,"\uc0ac\uc77c\ub85c \ud604\uc0c1\uc740 \uc11c\ube44\uc2a4 \ud488\uc9c8\uc758 \uc800\ud558\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7ec\ud55c \uc0ac\uc77c\ub85c \ud604\uc0c1\uc744 \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub098\uc628 \uac83\uc774 \ubc14\ub85c DevOps\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"cicd"},"CI/CD"),(0,i.kt)("p",null,"Continuous Integration(CI) \uc640 Continuous Delivery (CD)\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uc7a5\ubcbd\uc744 \ud574\uc81c\ud558\uae30 \uc704\ud55c \uad6c\uccb4\uc801\uc778 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"cicd",src:n(3984).Z,width:"1400",height:"299"})),(0,i.kt)("p",null,"\uc774 \ubc29\ubc95\uc744 \ud1b5\ud574\uc11c \uac1c\ubc1c\ud300\uc5d0\uc11c\ub294 \uc6b4\uc601\ud300\uc758 \ud658\uacbd\uc744 \uc774\ud574\ud558\uace0 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c \uc911\uc778 \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uae4c\uc9c0 \uc774\uc5b4\uc9c8 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4. 
\uc6b4\uc601\ud300\uc740 \uac80\uc99d\ub41c \uae30\ub2a5 \ub610\ub294 \uac1c\uc120\ub41c \uc81c\ud488\uc744 \ub354 \uc790\uc8fc \ubc30\ud3ec\ud574 \uace0\uac1d\uc758 \uc81c\ud488 \uacbd\ud5d8\uc744 \uc0c1\uc2b9\uc2dc\ud0b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \uc885\ud569\ud558\uc790\uba74 DevOps\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300 \uac04\uc758 \ubb38\uc81c\uac00 \uc788\uc5c8\uace0 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\ub860\uc785\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"mlops"},"MLOps"),(0,i.kt)("h3",{id:"1-mlops"},"1) ML+Ops"),(0,i.kt)("p",null,"MLOps\ub294 Machine Learning \uacfc Operations\uc758 \ud569\uc131\uc5b4\ub85c DevOps\uc5d0\uc11c Dev\uac00 ML\ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc81c \uc55e\uc5d0\uc11c \uc0b4\ud3b4\ubcf8 DevOps\ub97c \ud1b5\ud574 MLOps\uac00 \ubb34\uc5c7\uc778\uc9c0 \uc9d0\uc791\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\u201cMLOps\ub294 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc758 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\uc785\ub2c8\ub2e4.\u201d\n\uc774 \ub9d0\uc740 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300 \uc0ac\uc774\uc5d0 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\ub2e4\ub294 \uc758\ubbf8\uc785\ub2c8\ub2e4. \uadf8\ub7fc \uc65c \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc5d0\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\uc744\uae4c\uc694? \ub450 \ud300 \uac04\uc758 \ubb38\uc81c\ub97c \uc54c\uc544\ubcf4\uae30 \uc704\ud574\uc11c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \uc608\uc2dc\ub85c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"rule-based"},"Rule Based"),(0,i.kt)("p",null,"\ucc98\uc74c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \ub9cc\ub4dc\ub294 \uacbd\uc6b0 \uac04\ub2e8\ud55c \uaddc\uce59\uc744 \uae30\ubc18\uc73c\ub85c \uc544\uc774\ud15c\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c 1\uc8fc\uc77c\uac04 \ud310\ub9e4\ub7c9\uc774 \uac00\uc7a5 \ub9ce\uc740 \uc21c\uc11c\ub300\ub85c \ubcf4\uc5ec\uc8fc\ub294 \uc2dd\uc758 \ubc29\uc2dd\uc744 \uc774\uc6a9\ud569\ub2c8\ub2e4. \uc774 \ubc29\uc2dd\uc73c\ub85c \ubaa8\ub378\uc744 \uc815\ud55c\ub2e4\uba74 \ud2b9\ubcc4\ud55c \uc774\uc720\uac00 \uc5c6\ub294 \uc774\uc0c1 \ubaa8\ub378\uc758 \uc218\uc815\uc774 \ud544\uc694 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"machine-learning"},"Machine Learning"),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \uc870\uae08 \ucee4\uc9c0\uace0 \ub85c\uadf8 \ub370\uc774\ud130\uac00 \ub9ce\uc774 \uc313\uc778\ub2e4\uba74 \uc774\ub97c \uc774\uc6a9\ud574 \uc544\uc774\ud15c \uae30\ubc18 \ud639\uc740 \uc720\uc800 \uae30\ubc18\uc758 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4. \uc774\ub54c \ubaa8\ub378\uc740 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"deep-learning"},"Deep Learning"),(0,i.kt)("p",null,"\uac1c\uc778\ud654 \ucd94\ucc9c\uc5d0 \ub300\ud55c \uc694\uad6c\uac00 \ub354 \ucee4\uc9c0\uace0 \ub354 \uc88b\uc740 \uc131\ub2a5\uc744 \ub0b4\ub294 \ubaa8\ub378\uc744 \ud544\uc694\ud574\uc9c8 \uacbd\uc6b0 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. 
\uc774\ub54c \ub9cc\ub4dc\ub294 \ubaa8\ub378\uc740 \uba38\uc2e0\ub7ec\ub2dd\uacfc \uac19\uc774 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"graph",src:n(4204).Z,width:"752",height:"582"})),(0,i.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc744 x\ucd95\uc744 \ubaa8\ub378\uc758 \ubcf5\uc7a1\ub3c4, y\ucd95\uc744 \ubaa8\ub378\uc758 \uc131\ub2a5\uc73c\ub85c \ub450\uace0 \uadf8\ub798\ud504\ub85c \ud45c\ud604\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf5\uc7a1\ub3c4\uac00 \uc62c\ub77c\uac08 \ub54c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc62c\ub77c\uac00\ub294 \uc0c1\uc2b9 \uad00\uacc4\ub97c \uac16\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ub525\ub7ec\ub2dd\uc73c\ub85c \ub118\uc5b4\uac08 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc774 \uc0c8\ub85c \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ub9cc\uc57d \uad00\ub9ac\ud574\uc57c\ud560 \ubaa8\ub378\uc774 \uc801\ub2e4\uba74 \uc11c\ub85c \ud611\uc5c5\uc744 \ud1b5\ud574\uc11c \ucda9\ubd84\ud788 \ud574\uacb0\ud560 \uc218 \uc788\uc9c0\ub9cc \uac1c\ubc1c\ud574\uc57c \ud560 \ubaa8\ub378\uc774 \ub9ce\uc544\uc9c4\ub2e4\uba74 DevOps\uc758 \uacbd\uc6b0\uc640 \uac19\uc774 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \ub098\ud0c0\ub098\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"DevOps\uc758 \ubaa9\ud45c\uc640 \ub9de\ucdb0\uc11c \uc0dd\uac01\ud574\ubcf4\uba74 MLOps\uc758 \ubaa9\ud45c\ub294 \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud14c\uc2a4\ud2b8\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc774 DevOps\uc758 \ubaa9\ud45c\uc600\ub2e4\uba74, MLOps\uc758 \ubaa9\ud45c\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \ucd5c\uadfc \ub098\uc624\uace0 \uc788\ub294 MLOps \uad00\ub828 \uc81c\ud488\uacfc \uc124\uba85\uc744 \ubcf4\uba74 \uaf2d \uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ubaa9\ud45c\ub9cc\uc744 \ub300\uc0c1\uc73c\ub85c \ud558\uace0 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc5b4\ub5a4 \uacbd\uc6b0\uc5d0\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \ub9cc\ub4e0 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub824\uace0 \ud569\ub2c8\ub2e4. \uc774\ub7ec\ud55c \ub2c8\uc988\ub294 \ucd5c\uadfc \uba38\uc2e0\ub7ec\ub2dd \ud504\ub85c\uc81d\ud2b8\uac00 \uc9c4\ud589\ub418\ub294 \uacfc\uc815\uc5d0\uc11c \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc758 \uacbd\uc6b0 \uc6b4\uc601\uc5d0\uc11c \uac04\ub2e8\ud55c \ubaa8\ub378\ubd80\ud130 \uc2dc\uc791\ud574 \uc6b4\uc601\ud560 \uc218 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc790\uc5f0\uc5b4, \uc774\ubbf8\uc9c0\uc640 \uac19\uc740 \uacf3\uc5d0\uc11c\ub294 \uaddc\uce59 \uae30\ubc18\uc758 \ubaa8\ub378\ubcf4\ub2e4\ub294 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud574 \uc8fc\uc5b4\uc9c4 \ud0dc\uc2a4\ud06c\ub97c \ud574\uacb0\ud560 \uc218 \uc788\ub294\uc9c0 \uac80\uc99d(POC)\ub97c \uc120\ud589\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uac80\uc99d\uc774 \ub05d\ub09c \ud504\ub85c\uc81d\ud2b8\ub294 \uc774\uc81c \uc11c\ube44\uc2a4\ub97c \uc704\ud55c \uc6b4\uc601 \ud658\uacbd\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uba38\uc2e0\ub7ec\ub2dd \ud300 \ub0b4\uc758 \uc790\uccb4 \uc5ed\ub7c9\uc73c\ub85c\ub294 \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc27d\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud55c \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"3-\uacb0\ub860"},"3) \uacb0\ub860"),(0,i.kt)("p",null,"\uc694\uc57d\ud558\uc790\uba74 MLOps\ub294 \ub450 \uac00\uc9c0 \ubaa9\ud45c\uac00 \uc788\uc2b5\ub2c8\ub2e4.\n\uc55e\uc5d0\uc11c \uc124\uba85\ud55c MLOps\ub294 ML+Ops \ub85c \ub450 \ud300\uc758 \uc0dd\uc0b0\uc131 \ud5a5\uc0c1\uc744 \uc704\ud55c \uac83\uc774\uc600\uc2b5\ub2c8\ub2e4.\n\ubc18\uba74, \ub4a4\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc740 ML->Ops \ub85c \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 \uac83\uc744 \ub9d0\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},3984:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},4204:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},745:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5865],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var a=r.createContext({}),s=function(e){var t=r.useContext(a),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(a.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},v=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,o=e.originalType,a=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),d=s(n),v=i,f=d["".concat(a,".").concat(v)]||d[v]||u[v]||o;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var o=n.length,l=new Array(o);l[0]=v;var p={};for(var a in t)hasOwnProperty.call(t,a)&&(p[a]=t[a]);p.originalType=e,p[d]="string"==typeof e?e:i,l[1]=p;for(var 
s=2;s{n.r(t),n.d(t,{assets:()=>a,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>p,toc:()=>s});var r=n(7462),i=(n(7294),n(3905));const o={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"introduction/intro",id:"version-1.0/introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/versioned_docs/version-1.0/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/docs/1.0/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. Levels of MLOps",permalink:"/docs/1.0/introduction/levels"}},a={},s=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML+Ops",id:"1-mlops",level:3},{value:"Rule Based",id:"rule-based",level:4},{value:"Machine Learning",id:"machine-learning",level:4},{value:"Deep Learning",id:"deep-learning",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) \uacb0\ub860",id:"3-\uacb0\ub860",level:3}],c={toc:s},d="wrapper";function u(e){let{components:t,...o}=e;return(0,i.kt)(d,(0,r.Z)({},c,o,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,i.kt)("p",null,"2012\ub144 Alexnet \uc774\ud6c4 CV, NLP\ub97c \ube44\ub86f\ud558\uc5ec \ub370\uc774\ud130\uac00 \uc874\uc7ac\ud558\ub294 \ub3c4\uba54\uc778\uc774\ub77c\uba74 \uc5b4\ub514\uc11c\ub4e0 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \ub3c4\uc785\ud558\uace0\uc790 \ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc740 AI\ub77c\ub294 \ub2e8\uc5b4\ub85c \ubb36\uc774\uba70 \ubd88\ub838\uace0 \ub9ce\uc740 \ub9e4\uccb4\uc5d0\uc11c AI\uc758 \ud544\uc694\uc131\uc744 \uc678\ucce4\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \ubb34\uc218\ud788 \ub9ce\uc740 \uae30\uc5c5\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \uc218\ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\ub97c \uc9c4\ud589\ud558\uc600\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uadf8 \uacb0\uacfc\ub294 \uc5b4\ub5bb\uac8c \ub418\uc5c8\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uc5d8\ub9ac\uba3c\ud2b8 AI\uc758 \uc74c\ubcd1\ucc2c \ub3d9\ubd81\uc544 \uc9c0\uc5ed \ucd1d\uad04\ucc45\uc784\uc790\ub294 ",(0,i.kt)("a",{parentName:"p",href:"https://zdnet.co.kr/view/?no=20200611062002"},(0,i.kt)("em",{parentName:"a"},'"10\uac1c \uae30\uc5c5\uc5d0 AI \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud55c\ub2e4\uba74 \uadf8\uc911 9\uac1c\ub294 \ucee8\uc149\uac80\uc99d(POC)\ub9cc \ud558\ub2e4 \ub05d\ub09c\ub2e4"')),"\uace0 \ub9d0\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc774\ucc98\ub7fc \ub9ce\uc740 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uc774 \ubb38\uc81c\ub97c \ud480 \uc218 \uc788\uc744 \uac83 \uac19\ub2e4\ub294 \uac00\ub2a5\uc131\ub9cc\uc744 \ubcf4\uc5ec\uc8fc\uace0 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4. \uadf8\ub9ac\uace0 \uc774 \uc2dc\uae30\ucbe4\uc5d0 ",(0,i.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI\uc5d0 \ub2e4\uc2dc \uaca8\uc6b8"),"\uc774 \ub2e4\uac00\uc624\uace0 \uc788\ub2e4\ub294 \uc804\ub9dd\ub3c4 \ub098\uc624\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc65c \ud504\ub85c\uc81d\ud2b8 \ub300\ubd80\ubd84\uc774 \ucee8\uc149\uac80\uc99d(POC) \ub2e8\uacc4\uc5d0\uc11c \ub05d\ub0ac\uc744\uae4c\uc694?",(0,i.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd \ucf54\ub4dc\ub9cc\uc73c\ub85c\ub294 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub97c \uc6b4\uc601\ud560 \uc218 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc2e4\uc81c \uc11c\ube44\uc2a4 \ub2e8\uacc4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc758 \ucf54\ub4dc\uac00 \ucc28\uc9c0\ud558\ub294 \ubd80\ubd84\uc740 \uc0dd\uac01\ubcf4\ub2e4 \ud06c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ub2e8\uc21c\ud788 \ubaa8\ub378\uc758 \uc131\ub2a5\ub9cc\uc774 \uc544\ub2cc \ub2e4\ub978 \ub9ce\uc740 \ubd80\ubd84\uc744 \uace0\ub824\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uad6c\uae00\uc740 \uc774\ub7f0 \ubb38\uc81c\ub97c 2015\ub144 ",(0,i.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),"\uc5d0\uc11c \uc9c0\uc801\ud55c \ubc14 \uc788\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uc774 \ub17c\ubb38\uc774 \ub098\uc62c \ub2f9\uc2dc\uc5d0\ub294 \uc544\uc9c1 \ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub525\ub7ec\ub2dd\uacfc \uba38\uc2e0\ub7ec\ub2dd\uc758 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud558\uae30 \ubc14\uc05c \uc2dc\uae30\uc600\uae30 \ub54c\ubb38\uc5d0, \ub17c\ubb38\uc774 \uc9c0\uc801\ud558\ub294 \ubc14\uc5d0 \ub9ce\uc740 \uc8fc\uc758\ub97c \uae30\uc6b8\uc774\uc9c0\ub294 \uc54a\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uadf8\ub9ac\uace0 \uba87 \ub144\uc774 \uc9c0\ub09c \ud6c4 \uba38\uc2e0\ub7ec\ub2dd\uacfc \ub525\ub7ec\ub2dd\uc740 \uac00\ub2a5\uc131\uc744 \uc785\uc99d\ud574\ub0b4\uc5b4, \uc774\uc81c \uc0ac\ub78c\ub4e4\uc740 \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0 \uc801\uc6a9\ud558\uace0\uc790 \ud588\uc2b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \uace7 \ub9ce\uc740 \uc0ac\ub78c\uc774 \uc2e4\uc81c \uc11c\ube44\uc2a4\ub294 \uc27d\uc9c0 \uc54a\ub2e4\ub294 \uac83\uc744 
\uae68\ub2ec\uc558\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"devops"},"Devops"),(0,i.kt)("p",null,"MLOps\ub294 \uc774\uc804\uc5d0 \uc5c6\ub358 \uc0c8\ub85c\uc6b4 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c DevOps\ub77c\uace0 \ubd88\ub9ac\ub294 \uac1c\ubc1c \ubc29\ubc95\ub860\uc5d0\uc11c \ud30c\uc0dd\ub41c \ub2e8\uc5b4\uc785\ub2c8\ub2e4. \uadf8\ub807\uae30\uc5d0 DevOps\ub97c \uc774\ud574\ud55c\ub2e4\uba74 MLOps\ub97c \uc774\ud574\ud558\ub294 \ub370 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"devops-1"},"DevOps"),(0,i.kt)("p",null,"DevOps\ub294 Development(\uac1c\ubc1c)\uc640 Operations(\uc6b4\uc601)\uc758 \ud569\uc131\uc5b4\ub85c \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc758 \uac1c\ubc1c(Development)\uacfc \uc6b4\uc601(Operations)\uc758 \ud569\uc131\uc5b4\ub85c\uc11c \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c\uc790\uc640 \uc815\ubcf4\uae30\uc220 \uc804\ubb38\uac00 \uac04\uc758 \uc18c\ud1b5, \ud611\uc5c5 \ubc0f \ud1b5\ud569\uc744 \uac15\uc870\ud558\ub294 \uac1c\ubc1c \ud658\uacbd\uc774\ub098 \ubb38\ud654\ub97c \ub9d0\ud569\ub2c8\ub2e4.\nDevOps\uc758 \ubaa9\uc801\uc740 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \uc870\uc9c1\uacfc \uc6b4\uc601 \uc870\uc9c1\uac04\uc758 \uc0c1\ud638 \uc758\uc874\uc801 \ub300\uc751\uc774\uba70 \uc870\uc9c1\uc774 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uacfc \uc11c\ube44\uc2a4\ub97c \ube60\ub978 \uc2dc\uac04\uc5d0 \uac1c\ubc1c \ubc0f \ubc30\ud3ec\ud558\ub294 \uac83\uc744 \ubaa9\uc801\uc73c\ub85c \ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,i.kt)("p",null,"\uadf8\ub7fc \uac04\ub2e8\ud55c \uc0c1\ud669 \uc124\uba85\uc744 \ud1b5\ud574 DevOps\uac00 \uc65c \ud544\uc694\ud55c\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4 \ucd08\uae30\uc5d0\ub294 \uc9c0\uc6d0\ud558\ub294 \uae30\ub2a5\uc774 \ub9ce\uc9c0 \uc54a\uc73c\uba70 \ud300 \ub610\ub294 \ud68c\uc0ac\uc758 \uaddc\ubaa8\uac00 \uc791\uc2b5\ub2c8\ub2e4. \uc774\ub54c\uc5d0\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uad6c\ubd84\uc774 \uc5c6\uac70\ub098 \uc791\uc740 \uaddc\ubaa8\uc758 \ud300\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \ud575\uc2ec\uc740 \uaddc\ubaa8\uac00 \uc791\ub2e4\ub294 \uac83\uc5d0 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub54c\ub294 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \uc811\uc810\uc774 \ub9ce\uace0, \uc9d1\uc911\ud574\uc57c \ud558\ub294 \uc11c\ube44\uc2a4\uac00 \uc801\uae30 \ub54c\ubb38\uc5d0 \ube60\ub974\uac8c \uc11c\ube44\uc2a4\ub97c \uac1c\uc120\ud574 \ub098\uac08 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \ucee4\uc9c8\uc218\ub85d \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc740 \ubd84\ub9ac\ub418\uace0 \uc11c\ub85c \uc18c\ud1b5\ud560 \uc218 \uc788\ub294 \ucc44\ub110\uc758 \ubb3c\ub9ac\uc801\uc778 \ud55c\uacc4\uac00 \uc624\uac8c \ub429\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\ub978 \ud300\uacfc \ud568\uaed8\ud558\ub294 \ubbf8\ud305\uc5d0 \ud300\uc6d0 \uc804\uccb4\uac00 \ubbf8\ud305\uc744 \ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c \uac01 \ud300\uc758 \ud300\uc7a5 \ud639\uc740 \uc18c\uc218\uc758 \uc2dc\ub2c8\uc5b4\ub9cc \ucc38\uc11d\ud558\uc5ec \ubbf8\ud305\uc744 \uc9c4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7f0 \uc18c\ud1b5 \ucc44\ub110\uc758 \ud55c\uacc4\ub294 \ud544\uc5f0\uc801\uc73c\ub85c \uc18c\ud1b5\uc758 \ubd80\uc7ac\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ub2e4 \ubcf4\uba74 \uac1c\ubc1c\ud300\uc740 \uc0c8\ub85c\uc6b4 \uae30\ub2a5\ub4e4\uc744 \uacc4\uc18d\ud574\uc11c \uac1c\ubc1c\ud558\uace0 \uc6b4\uc601\ud300 \uc785\uc7a5\uc5d0\uc11c\ub294 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \ubc30\ud3ec \uc2dc \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\ub294 \ub4f1 \uc5ec\ub7ec \ubb38\uc81c\uac00 \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\uc704\uc640 \uac19\uc740 \uc0c1\ud669\uc774 \ubc18\ubcf5\ub418\uba74 \uc870\uc9c1 \uc774\uae30\uc8fc\uc758\ub77c\uace0 \ubd88\ub9ac\ub294 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"silo",src:n(745).Z,width:"892",height:"498"})),(0,i.kt)("blockquote",null,(0,i.kt)("p",{parentName:"blockquote"},"\uc0ac\uc77c\ub85c(silo)\ub294 \uace1\uc2dd\uc774\ub098 \uc0ac\ub8cc\ub97c \uc800\uc7a5\ud558\ub294 \uad74\ub69d \ubaa8\uc591\uc758 \ucc3d\uace0\ub97c \uc758\ubbf8\ud55c\ub2e4. \uc0ac\uc77c\ub85c\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc874\uc7ac\ud558\uba70 \uc800\uc7a5\ub418\ub294 \ubb3c\ud488\uc774 \uc11c\ub85c \uc11e\uc774\uc9c0 \uc54a\ub3c4\ub85d \ucca0\uc800\ud788 \uad00\ub9ac\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc900\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc0ac\uc77c\ub85c \ud6a8\uacfc(Organizational Silos Effect)\ub294 \uc870\uc9c1 \ubd80\uc11c \uac04\uc5d0 \uc11c\ub85c \ud611\ub825\ud558\uc9c0 \uc54a\uace0 \ub0b4\ubd80 \uc774\uc775\ub9cc\uc744 \ucd94\uad6c\ud558\ub294 \ud604\uc0c1\uc744 \uc758\ubbf8\ud55c\ub2e4. \uc870\uc9c1 \ub0b4\uc5d0\uc11c \uac1c\ubcc4 \ubd80\uc11c\ub07c\ub9ac \uc11c\ub85c \ub2f4\uc744 \uc313\uace0 \uac01\uc790\uc758 \uc774\uc775\uc5d0\ub9cc \ubab0\ub450\ud558\ub294 \ubd80\uc11c \uc774\uae30\uc8fc\uc758\ub97c \uc77c\uceeb\ub294\ub2e4.")),(0,i.kt)("p",null,"\uc0ac\uc77c\ub85c \ud604\uc0c1\uc740 \uc11c\ube44\uc2a4 \ud488\uc9c8\uc758 \uc800\ud558\ub85c \uc774\uc5b4\uc9c0\uac8c \ub429\ub2c8\ub2e4. \uc774\ub7ec\ud55c \uc0ac\uc77c\ub85c \ud604\uc0c1\uc744 \ud574\uacb0\ud558\uae30 \uc704\ud574 \ub098\uc628 \uac83\uc774 \ubc14\ub85c DevOps\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"cicd"},"CI/CD"),(0,i.kt)("p",null,"Continuous Integration(CI) \uc640 Continuous Delivery (CD)\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300\uc758 \uc7a5\ubcbd\uc744 \ud574\uc81c\ud558\uae30 \uc704\ud55c \uad6c\uccb4\uc801\uc778 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"cicd",src:n(3984).Z,width:"1400",height:"299"})),(0,i.kt)("p",null,"\uc774 \ubc29\ubc95\uc744 \ud1b5\ud574\uc11c \uac1c\ubc1c\ud300\uc5d0\uc11c\ub294 \uc6b4\uc601\ud300\uc758 \ud658\uacbd\uc744 \uc774\ud574\ud558\uace0 \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c \uc911\uc778 \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uae4c\uc9c0 \uc774\uc5b4\uc9c8 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4. 
\uc6b4\uc601\ud300\uc740 \uac80\uc99d\ub41c \uae30\ub2a5 \ub610\ub294 \uac1c\uc120\ub41c \uc81c\ud488\uc744 \ub354 \uc790\uc8fc \ubc30\ud3ec\ud574 \uace0\uac1d\uc758 \uc81c\ud488 \uacbd\ud5d8\uc744 \uc0c1\uc2b9\uc2dc\ud0b5\ub2c8\ub2e4.",(0,i.kt)("br",{parentName:"p"}),"\n","\uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \uc885\ud569\ud558\uc790\uba74 DevOps\ub294 \uac1c\ubc1c\ud300\uacfc \uc6b4\uc601\ud300 \uac04\uc758 \ubb38\uc81c\uac00 \uc788\uc5c8\uace0 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\ub860\uc785\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"mlops"},"MLOps"),(0,i.kt)("h3",{id:"1-mlops"},"1) ML+Ops"),(0,i.kt)("p",null,"MLOps\ub294 Machine Learning \uacfc Operations\uc758 \ud569\uc131\uc5b4\ub85c DevOps\uc5d0\uc11c Dev\uac00 ML\ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\uc81c \uc55e\uc5d0\uc11c \uc0b4\ud3b4\ubcf8 DevOps\ub97c \ud1b5\ud574 MLOps\uac00 \ubb34\uc5c7\uc778\uc9c0 \uc9d0\uc791\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\u201cMLOps\ub294 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc758 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud55c \ubc29\ubc95\uc785\ub2c8\ub2e4.\u201d\n\uc774 \ub9d0\uc740 \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300 \uc0ac\uc774\uc5d0 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\ub2e4\ub294 \uc758\ubbf8\uc785\ub2c8\ub2e4. \uadf8\ub7fc \uc65c \uba38\uc2e0\ub7ec\ub2dd\ud300\uacfc \uc6b4\uc601\ud300\uc5d0\ub294 \ubb38\uc81c\uac00 \ubc1c\uc0dd\ud588\uc744\uae4c\uc694? \ub450 \ud300 \uac04\uc758 \ubb38\uc81c\ub97c \uc54c\uc544\ubcf4\uae30 \uc704\ud574\uc11c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \uc608\uc2dc\ub85c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"rule-based"},"Rule Based"),(0,i.kt)("p",null,"\ucc98\uc74c \ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc744 \ub9cc\ub4dc\ub294 \uacbd\uc6b0 \uac04\ub2e8\ud55c \uaddc\uce59\uc744 \uae30\ubc18\uc73c\ub85c \uc544\uc774\ud15c\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4\uc11c 1\uc8fc\uc77c\uac04 \ud310\ub9e4\ub7c9\uc774 \uac00\uc7a5 \ub9ce\uc740 \uc21c\uc11c\ub300\ub85c \ubcf4\uc5ec\uc8fc\ub294 \uc2dd\uc758 \ubc29\uc2dd\uc744 \uc774\uc6a9\ud569\ub2c8\ub2e4. \uc774 \ubc29\uc2dd\uc73c\ub85c \ubaa8\ub378\uc744 \uc815\ud55c\ub2e4\uba74 \ud2b9\ubcc4\ud55c \uc774\uc720\uac00 \uc5c6\ub294 \uc774\uc0c1 \ubaa8\ub378\uc758 \uc218\uc815\uc774 \ud544\uc694 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"machine-learning"},"Machine Learning"),(0,i.kt)("p",null,"\uc11c\ube44\uc2a4\uc758 \uaddc\ubaa8\uac00 \uc870\uae08 \ucee4\uc9c0\uace0 \ub85c\uadf8 \ub370\uc774\ud130\uac00 \ub9ce\uc774 \uc313\uc778\ub2e4\uba74 \uc774\ub97c \uc774\uc6a9\ud574 \uc544\uc774\ud15c \uae30\ubc18 \ud639\uc740 \uc720\uc800 \uae30\ubc18\uc758 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4. \uc774\ub54c \ubaa8\ub378\uc740 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("h4",{id:"deep-learning"},"Deep Learning"),(0,i.kt)("p",null,"\uac1c\uc778\ud654 \ucd94\ucc9c\uc5d0 \ub300\ud55c \uc694\uad6c\uac00 \ub354 \ucee4\uc9c0\uace0 \ub354 \uc88b\uc740 \uc131\ub2a5\uc744 \ub0b4\ub294 \ubaa8\ub378\uc744 \ud544\uc694\ud574\uc9c8 \uacbd\uc6b0 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. 
\uc774\ub54c \ub9cc\ub4dc\ub294 \ubaa8\ub378\uc740 \uba38\uc2e0\ub7ec\ub2dd\uacfc \uac19\uc774 \uc815\ud574\uc9c4 \uc8fc\uae30\uc5d0 \ub530\ub77c \ubaa8\ub378\uc744 \uc7ac\ud559\uc2b5 \ud6c4 \uc7ac\ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"graph",src:n(4204).Z,width:"752",height:"582"})),(0,i.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc744 x\ucd95\uc744 \ubaa8\ub378\uc758 \ubcf5\uc7a1\ub3c4, y\ucd95\uc744 \ubaa8\ub378\uc758 \uc131\ub2a5\uc73c\ub85c \ub450\uace0 \uadf8\ub798\ud504\ub85c \ud45c\ud604\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf5\uc7a1\ub3c4\uac00 \uc62c\ub77c\uac08 \ub54c \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \uc62c\ub77c\uac00\ub294 \uc0c1\uc2b9 \uad00\uacc4\ub97c \uac16\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ub525\ub7ec\ub2dd\uc73c\ub85c \ub118\uc5b4\uac08 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc774 \uc0c8\ub85c \uc0dd\uae30\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ub9cc\uc57d \uad00\ub9ac\ud574\uc57c\ud560 \ubaa8\ub378\uc774 \uc801\ub2e4\uba74 \uc11c\ub85c \ud611\uc5c5\uc744 \ud1b5\ud574\uc11c \ucda9\ubd84\ud788 \ud574\uacb0\ud560 \uc218 \uc788\uc9c0\ub9cc \uac1c\ubc1c\ud574\uc57c \ud560 \ubaa8\ub378\uc774 \ub9ce\uc544\uc9c4\ub2e4\uba74 DevOps\uc758 \uacbd\uc6b0\uc640 \uac19\uc774 \uc0ac\uc77c\ub85c \ud604\uc0c1\uc774 \ub098\ud0c0\ub098\uac8c \ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,"DevOps\uc758 \ubaa9\ud45c\uc640 \ub9de\ucdb0\uc11c \uc0dd\uac01\ud574\ubcf4\uba74 MLOps\uc758 \ubaa9\ud45c\ub294 \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud14c\uc2a4\ud2b8\ud558\ub294 \uac83\uc785\ub2c8\ub2e4. \uac1c\ubc1c\ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \uae30\ub2a5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc774 DevOps\uc758 \ubaa9\ud45c\uc600\ub2e4\uba74, MLOps\uc758 \ubaa9\ud45c\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uac1c\ubc1c\ud55c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ub420 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,i.kt)("p",null,"\ud558\uc9c0\ub9cc \ucd5c\uadfc \ub098\uc624\uace0 \uc788\ub294 MLOps \uad00\ub828 \uc81c\ud488\uacfc \uc124\uba85\uc744 \ubcf4\uba74 \uaf2d \uc55e\uc5d0\uc11c \uc124\uba85\ud55c \ubaa9\ud45c\ub9cc\uc744 \ub300\uc0c1\uc73c\ub85c \ud558\uace0 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc5b4\ub5a4 \uacbd\uc6b0\uc5d0\ub294 \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \ub9cc\ub4e0 \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub824\uace0 \ud569\ub2c8\ub2e4. \uc774\ub7ec\ud55c \ub2c8\uc988\ub294 \ucd5c\uadfc \uba38\uc2e0\ub7ec\ub2dd \ud504\ub85c\uc81d\ud2b8\uac00 \uc9c4\ud589\ub418\ub294 \uacfc\uc815\uc5d0\uc11c \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,"\ucd94\ucc9c\uc2dc\uc2a4\ud15c\uc758 \uacbd\uc6b0 \uc6b4\uc601\uc5d0\uc11c \uac04\ub2e8\ud55c \ubaa8\ub378\ubd80\ud130 \uc2dc\uc791\ud574 \uc6b4\uc601\ud560 \uc218 \uc788\uc5c8\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc790\uc5f0\uc5b4, \uc774\ubbf8\uc9c0\uc640 \uac19\uc740 \uacf3\uc5d0\uc11c\ub294 \uaddc\uce59 \uae30\ubc18\uc758 \ubaa8\ub378\ubcf4\ub2e4\ub294 \ub525\ub7ec\ub2dd\uc744 \uc774\uc6a9\ud574 \uc8fc\uc5b4\uc9c4 \ud0dc\uc2a4\ud06c\ub97c \ud574\uacb0\ud560 \uc218 \uc788\ub294\uc9c0 \uac80\uc99d(POC)\ub97c \uc120\ud589\ud558\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4. \uac80\uc99d\uc774 \ub05d\ub09c \ud504\ub85c\uc81d\ud2b8\ub294 \uc774\uc81c \uc11c\ube44\uc2a4\ub97c \uc704\ud55c \uc6b4\uc601 \ud658\uacbd\uc744 \uac1c\ubc1c\ud558\uae30 \uc2dc\uc791\ud569\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uba38\uc2e0\ub7ec\ub2dd \ud300 \ub0b4\uc758 \uc790\uccb4 \uc5ed\ub7c9\uc73c\ub85c\ub294 \uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc27d\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c MLOps\uac00 \ud544\uc694\ud55c \uacbd\uc6b0\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"3-\uacb0\ub860"},"3) \uacb0\ub860"),(0,i.kt)("p",null,"\uc694\uc57d\ud558\uc790\uba74 MLOps\ub294 \ub450 \uac00\uc9c0 \ubaa9\ud45c\uac00 \uc788\uc2b5\ub2c8\ub2e4.\n\uc55e\uc5d0\uc11c \uc124\uba85\ud55c MLOps\ub294 ML+Ops \ub85c \ub450 \ud300\uc758 \uc0dd\uc0b0\uc131 \ud5a5\uc0c1\uc744 \uc704\ud55c \uac83\uc774\uc600\uc2b5\ub2c8\ub2e4.\n\ubc18\uba74, \ub4a4\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uc740 ML->Ops \ub85c \uba38\uc2e0\ub7ec\ub2dd \ud300\uc5d0\uc11c \uc9c1\uc811 \uc6b4\uc601\uc744 \ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 \uac83\uc744 \ub9d0\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},3984:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},4204:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},745:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file diff --git a/assets/js/36fd762b.66852ff2.js b/assets/js/36fd762b.8d9d7a1c.js similarity index 98% rename from assets/js/36fd762b.66852ff2.js rename to assets/js/36fd762b.8d9d7a1c.js index fce3c4b6..2d27b1ab 100644 --- a/assets/js/36fd762b.66852ff2.js +++ b/assets/js/36fd762b.8d9d7a1c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6052],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,u=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=o(n),k=a,m=c["".concat(u,".").concat(k)]||c[k]||d[k]||s;return n?r.createElement(m,l(l({ref:t},p),{},{components:n})):r.createElement(m,l({ref:t},p))}));function m(e,t){var 
n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=k;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>s,metadata:()=>i,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"k3s \uc5d0\uc11c\ub294 \uae30\ubcf8\uac12\uc73c\ub85c containerd\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc774\uc6a9\ud574 \uc124\uce58\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc800\ud76c\ub294 GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c docker\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc0ac\uc6a9\ud574\uc57c \ud558\ubbc0\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"--docker")," \uc635\uc158\uc744 \ud1b5\ud574 \ubc31\uc5d4\ub4dc\ub97c docker\ub85c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,a.kt)("p",null,"k3s\ub97c \uc124\uce58 \ud6c4 k3s config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud56d\ubaa9\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\ubcf4\uc548 \ubb38\uc81c\uc640 \uad00\ub828\ub41c \ud0a4\ub4e4\uc740 <...>\ub85c \uac00\ub838\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,a.kt)("p",null,"k3s config\ub97c \ud074\ub7ec\uc2a4\ud130\uc758 kubeconfig\ub85c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c \ubcf5\uc0ac\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,a.kt)("p",null,"\ubcf5\uc0ac\ub41c config \ud30c\uc77c\uc5d0 user\uac00 \uc811\uadfc\ud560 \uc218 \uc788\ub294 \uad8c\ud55c\uc744 \uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,a.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. 
\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,a.kt)("p",null,"\uc774\uc81c \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc124\uc815\ud55c kubeconfig\ub97c \ub85c\uceec\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4.\n\ub85c\uceec\uc5d0\uc11c\ub294 \uacbd\ub85c\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"~/.kube/config"),"\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucc98\uc74c \ubcf5\uc0ac\ud55c config \ud30c\uc77c\uc5d0\ub294 server ip\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443")," \uc73c\ub85c \ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\uc744 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\uac8c \uc218\uc815\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\ucdb0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," \uc73c\ub85c \uc218\uc815\ud588\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,a.kt)("h2",{id:"6-references"},"6. 
References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6052],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,u=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=o(n),k=a,m=c["".concat(u,".").concat(k)]||c[k]||d[k]||s;return n?r.createElement(m,l(l({ref:t},p),{},{components:n})):r.createElement(m,l({ref:t},p))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=k;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>s,metadata:()=>i,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. 
Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"k3s \uc5d0\uc11c\ub294 \uae30\ubcf8\uac12\uc73c\ub85c containerd\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc774\uc6a9\ud574 \uc124\uce58\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc800\ud76c\ub294 GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c docker\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc0ac\uc6a9\ud574\uc57c \ud558\ubbc0\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"--docker")," \uc635\uc158\uc744 \ud1b5\ud574 \ubc31\uc5d4\ub4dc\ub97c docker\ub85c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,a.kt)("p",null,"k3s\ub97c \uc124\uce58 \ud6c4 k3s config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud56d\ubaa9\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\ubcf4\uc548 \ubb38\uc81c\uc640 \uad00\ub828\ub41c \ud0a4\ub4e4\uc740 <...>\ub85c \uac00\ub838\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. 
\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,a.kt)("p",null,"k3s config\ub97c \ud074\ub7ec\uc2a4\ud130\uc758 kubeconfig\ub85c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c \ubcf5\uc0ac\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,a.kt)("p",null,"\ubcf5\uc0ac\ub41c config \ud30c\uc77c\uc5d0 user\uac00 \uc811\uadfc\ud560 \uc218 \uc788\ub294 \uad8c\ud55c\uc744 \uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,a.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,a.kt)("p",null,"\uc774\uc81c \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc124\uc815\ud55c kubeconfig\ub97c \ub85c\uceec\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4.\n\ub85c\uceec\uc5d0\uc11c\ub294 \uacbd\ub85c\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"~/.kube/config"),"\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucc98\uc74c \ubcf5\uc0ac\ud55c config \ud30c\uc77c\uc5d0\ub294 server ip\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443")," \uc73c\ub85c \ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\uc744 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\uac8c \uc218\uc815\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\ucdb0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," \uc73c\ub85c \uc218\uc815\ud588\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,a.kt)("h2",{id:"6-references"},"6. References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/38642333.6757cffa.js b/assets/js/38642333.107370bc.js similarity index 99% rename from assets/js/38642333.6757cffa.js rename to assets/js/38642333.107370bc.js index 280f3849..01c52012 100644 --- a/assets/js/38642333.6757cffa.js +++ b/assets/js/38642333.107370bc.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6643],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=p(t),u=l,g=m["".concat(i,".").concat(u)]||m[u]||c[u]||r;return t?a.createElement(g,s(s({ref:n},d),{},{components:t})):a.createElement(g,s({ref:n},d))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,s=new Array(r);s[0]=u;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},s=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"version-1.0/api-deployment/seldon-iris",title:"2. 
Deploy SeldonDeployment",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/docs/1.0/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-iris.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/docs/1.0/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/docs/1.0/api-deployment/seldon-pg"}},i={},p=[{value:"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30",id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:3},{value:"2. \uc2a4\ud399 \uc815\uc758",id:"2-\uc2a4\ud399-\uc815\uc758",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Swagger \uc811\uc18d",id:"1-swagger-\uc811\uc18d",level:3},{value:"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd",id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd",level:3},{value:"3. Try it out \uc120\ud0dd",id:"3-try-it-out-\uc120\ud0dd",level:3},{value:"4. Request body\uc5d0 data \uc785\ub825",id:"4-request-body\uc5d0-data-\uc785\ub825",level:3},{value:"5. \ucd94\ub860 \uacb0\uacfc \ud655\uc778",id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778",level:3},{value:"Using CLI",id:"using-cli",level:2}],d={toc:p},m="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(m,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30"},"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30"),(0,l.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\uc744 \ub54c SeldonDeployment\ub97c \ud1b5\ud574 API Deployment\ub97c \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\nSeldonDeployment\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\uc5d0 \ubaa8\ub378\uc744 REST/gRPC \uc11c\ubc84\uc758 \ud615\ud0dc\ub85c \ubc30\ud3ec\ud558\uae30 \uc704\ud574 \uc815\uc758\ub41c CRD(CustomResourceDefinition)\uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"SeldonDeployment \uad00\ub828\ub41c \uc2e4\uc2b5\uc740 seldon-deploy\ub77c\ub294 \uc0c8\ub85c\uc6b4 \ub124\uc784\uc2a4\ud398\uc774\uc2a4(namespace)\uc5d0\uc11c \uc9c4\ud589\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub97c \uc0dd\uc131\ud55c \ub4a4, seldon-deploy\ub97c \ud604\uc7ac \ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-\uc2a4\ud399-\uc815\uc758"},"2. 
\uc2a4\ud399 \uc815\uc758"),(0,l.kt)("p",null,"SeldonDeployment\ub97c \ubc30\ud3ec\ud558\uae30 \uc704\ud55c yaml \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uacf5\uac1c\ub41c iris model\uc744 \uc0ac\uc6a9\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\uc774 iris model\uc740 sklearn \ud504\ub808\uc784\uc6cc\ud06c\ub97c \ud1b5\ud574 \ud559\uc2b5\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 SKLEARN_SERVER\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"yaml \ud30c\uc77c\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uac00 \ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"\uc774\uc81c \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ucd94\ub860 \uc694\uccad(predict request)\ub97c \ubcf4\ub0b4\uc11c \ucd94\ub860 \uacb0\uad0f\uac12\uc744 \ubc1b\uc544\uc635\ub2c8\ub2e4.\n\ubc30\ud3ec\ub41c API\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uaddc\uce59\uc73c\ub85c \uc0dd\uc131\ub429\ub2c8\ub2e4.\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-seldon"},"Seldon Core \uc124\uce58 \uc2dc, Ambassador\ub97c Ingress Controller\ub85c \uc124\uc815\ud558\uc600\uc73c\ubbc0\ub85c"),", SeldonDeployment\ub85c \uc0dd\uc131\ub41c API \uc11c\ubc84\ub294 \ubaa8\ub450 Ambassador\uc758 Ingress gateway\ub97c \ud1b5\ud574 \uc694\uccad\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub530\ub77c\uc11c \uc6b0\uc120 Ambassador Ingress Gateway\uc758 url\uc744 \ud658\uacbd \ubcc0\uc218\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"\uc124\uc815\ub41c url\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc 
\ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uc5b4\uc57c \ud558\uba70, \ud074\ub77c\uc6b0\ub4dc \ub4f1\uc744 \ud1b5\ud574 \uc124\uc815\ud560 \uacbd\uc6b0, internal ip \uc8fc\uc18c\uac00 \uc124\uc815\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"SeldonDeployment\uac00 \ubc30\ud3ec\ub41c ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\uc640 ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\n\uc774\ub294 \uc2a4\ud399\uc744 \uc815\uc758\ud560 \ub54c metadata\uc5d0 \uc815\uc758\ub41c \uac12\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\ub294 seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\uc740 sklearn \uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"SeldonDeployment\uc5d0\uc11c \uc8fc\ub85c \uc0ac\uc6a9\ud558\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name"),"\uc740 \ub450 \uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"\uac01\uac01\uc758 method\uc758 \uc790\uc138\ud55c \uc0ac\uc6a9 \ubc29\ubc95\uc740 \uc544\ub798\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"\uc6b0\uc120 doc method\ub97c \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc785\ub2c8\ub2e4. doc method\ub97c \uc774\uc6a9\ud558\uba74 seldon\uc5d0\uc11c \uc0dd\uc131\ud55c swagger\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-swagger-\uc811\uc18d"},"1. Swagger \uc811\uc18d"),(0,l.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c ingress url \uaddc\uce59\uc5d0 \ub530\ub77c \uc544\ub798 \uc8fc\uc18c\ub97c \ud1b5\ud574 swagger\uc5d0 \uc811\uadfc\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(1093).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd"},"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd"),(0,l.kt)("p",null,"UI\uc5d0\uc11c ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," \uba54\ub274\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(1981).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-try-it-out-\uc120\ud0dd"},"3. ",(0,l.kt)("em",{parentName:"h3"},"Try it out")," \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(4736).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-request-body\uc5d0-data-\uc785\ub825"},"4. 
Request body\uc5d0 data \uc785\ub825"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(4808).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"\ub2e4\uc74c \ub370\uc774\ud130\ub97c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778"},"5. \ucd94\ub860 \uacb0\uacfc \ud655\uc778"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," \ubc84\ud2bc\uc744 \ub20c\ub7ec\uc11c \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(6470).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\ub860 \uacb0\uacfc\ub97c \uc5bb\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"\ub610\ud55c, curl\uacfc \uac19\uc740 http client CLI \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc11c\ub3c4 API \uc694\uccad\uc744 \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions"),"\ub97c \uc694\uccad\ud558\uba74"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \uc751\ub2f5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}c.isMDXComponent=!0},1093:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},1981:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},4736:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},4808:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},6470:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6643],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var 
n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=p(t),u=l,g=m["".concat(i,".").concat(u)]||m[u]||c[u]||r;return t?a.createElement(g,s(s({ref:n},d),{},{components:t})):a.createElement(g,s({ref:n},d))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,s=new Array(r);s[0]=u;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},s=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"version-1.0/api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/docs/1.0/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-iris.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/docs/1.0/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/docs/1.0/api-deployment/seldon-pg"}},i={},p=[{value:"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30",id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:3},{value:"2. \uc2a4\ud399 \uc815\uc758",id:"2-\uc2a4\ud399-\uc815\uc758",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Swagger \uc811\uc18d",id:"1-swagger-\uc811\uc18d",level:3},{value:"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd",id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd",level:3},{value:"3. Try it out \uc120\ud0dd",id:"3-try-it-out-\uc120\ud0dd",level:3},{value:"4. Request body\uc5d0 data \uc785\ub825",id:"4-request-body\uc5d0-data-\uc785\ub825",level:3},{value:"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778",id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778",level:3},{value:"Using CLI",id:"using-cli",level:2}],d={toc:p},m="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(m,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30"},"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30"),(0,l.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\uc744 \ub54c SeldonDeployment\ub97c \ud1b5\ud574 API Deployment\ub97c \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\nSeldonDeployment\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\uc5d0 \ubaa8\ub378\uc744 REST/gRPC \uc11c\ubc84\uc758 \ud615\ud0dc\ub85c \ubc30\ud3ec\ud558\uae30 \uc704\ud574 \uc815\uc758\ub41c CRD(CustomResourceDefinition)\uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"SeldonDeployment \uad00\ub828\ub41c \uc2e4\uc2b5\uc740 seldon-deploy\ub77c\ub294 \uc0c8\ub85c\uc6b4 \ub124\uc784\uc2a4\ud398\uc774\uc2a4(namespace)\uc5d0\uc11c \uc9c4\ud589\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub97c \uc0dd\uc131\ud55c \ub4a4, seldon-deploy\ub97c \ud604\uc7ac \ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-\uc2a4\ud399-\uc815\uc758"},"2. \uc2a4\ud399 \uc815\uc758"),(0,l.kt)("p",null,"SeldonDeployment\ub97c \ubc30\ud3ec\ud558\uae30 \uc704\ud55c yaml \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uacf5\uac1c\ub41c iris model\uc744 \uc0ac\uc6a9\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\uc774 iris model\uc740 sklearn \ud504\ub808\uc784\uc6cc\ud06c\ub97c \ud1b5\ud574 \ud559\uc2b5\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 SKLEARN_SERVER\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"yaml \ud30c\uc77c\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uac00 \ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"\uc774\uc81c \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ucd94\ub860 \uc694\uccad(predict request)\ub97c 
\ubcf4\ub0b4\uc11c \ucd94\ub860 \uacb0\uad0f\uac12\uc744 \ubc1b\uc544\uc635\ub2c8\ub2e4.\n\ubc30\ud3ec\ub41c API\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uaddc\uce59\uc73c\ub85c \uc0dd\uc131\ub429\ub2c8\ub2e4.\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-seldon"},"Seldon Core \uc124\uce58 \uc2dc, Ambassador\ub97c Ingress Controller\ub85c \uc124\uc815\ud558\uc600\uc73c\ubbc0\ub85c"),", SeldonDeployment\ub85c \uc0dd\uc131\ub41c API \uc11c\ubc84\ub294 \ubaa8\ub450 Ambassador\uc758 Ingress gateway\ub97c \ud1b5\ud574 \uc694\uccad\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub530\ub77c\uc11c \uc6b0\uc120 Ambassador Ingress Gateway\uc758 url\uc744 \ud658\uacbd \ubcc0\uc218\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"\uc124\uc815\ub41c url\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uc5b4\uc57c \ud558\uba70, \ud074\ub77c\uc6b0\ub4dc \ub4f1\uc744 \ud1b5\ud574 \uc124\uc815\ud560 \uacbd\uc6b0, internal ip \uc8fc\uc18c\uac00 \uc124\uc815\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"SeldonDeployment\uac00 \ubc30\ud3ec\ub41c ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\uc640 ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\n\uc774\ub294 \uc2a4\ud399\uc744 \uc815\uc758\ud560 \ub54c metadata\uc5d0 \uc815\uc758\ub41c \uac12\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\ub294 seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\uc740 sklearn \uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"SeldonDeployment\uc5d0\uc11c \uc8fc\ub85c \uc0ac\uc6a9\ud558\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name"),"\uc740 \ub450 \uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"\uac01\uac01\uc758 method\uc758 \uc790\uc138\ud55c \uc0ac\uc6a9 \ubc29\ubc95\uc740 \uc544\ub798\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"\uc6b0\uc120 doc method\ub97c \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc785\ub2c8\ub2e4. 
doc method\ub97c \uc774\uc6a9\ud558\uba74 seldon\uc5d0\uc11c \uc0dd\uc131\ud55c swagger\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-swagger-\uc811\uc18d"},"1. Swagger \uc811\uc18d"),(0,l.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c ingress url \uaddc\uce59\uc5d0 \ub530\ub77c \uc544\ub798 \uc8fc\uc18c\ub97c \ud1b5\ud574 swagger\uc5d0 \uc811\uadfc\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(1093).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd"},"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd"),(0,l.kt)("p",null,"UI\uc5d0\uc11c ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," \uba54\ub274\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(1981).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-try-it-out-\uc120\ud0dd"},"3. ",(0,l.kt)("em",{parentName:"h3"},"Try it out")," \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(4736).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-request-body\uc5d0-data-\uc785\ub825"},"4. Request body\uc5d0 data \uc785\ub825"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(4808).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"\ub2e4\uc74c \ub370\uc774\ud130\ub97c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778"},"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," \ubc84\ud2bc\uc744 \ub20c\ub7ec\uc11c \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(6470).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\ub860 \uacb0\uacfc\ub97c \uc5bb\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"\ub610\ud55c, curl\uacfc \uac19\uc740 http client CLI \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc11c\ub3c4 API \uc694\uccad\uc744 \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions"),"\ub97c \uc694\uccad\ud558\uba74"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \uc751\ub2f5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}c.isMDXComponent=!0},1093:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},1981:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},4736:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},4808:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},6470:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file diff --git a/assets/js/39b2b572.20978a65.js b/assets/js/39b2b572.5117de81.js similarity index 99% rename from assets/js/39b2b572.20978a65.js rename to assets/js/39b2b572.5117de81.js index 0c822b82..00ccbff4 100644 --- a/assets/js/39b2b572.20978a65.js +++ b/assets/js/39b2b572.5117de81.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8837],{3905:(n,e,r)=>{r.d(e,{Zo:()=>_,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function a(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function u(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):u(u({},e),n)),r},_=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},o="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,a=n.originalType,s=n.parentName,_=i(n,["components","mdxType","originalType","parentName"]),o=m(r),d=p,b=o["".concat(s,".").concat(d)]||o[d]||l[d]||a;return r?t.createElement(b,u(u({ref:e},_),{},{components:r})):t.createElement(b,u({ref:e},_))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var a=r.length,u=new Array(a);u[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[o]="string"==typeof n?n:p,u[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>a,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const a={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"version-1.0/kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/docs/1.0/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/docs/1.0/kubeflow/advanced-environment"},next:{title:"11. 
Pipeline - Run Result",permalink:"/docs/1.0/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],_={toc:m},o="wrapper";function l(n){let{components:e,...a}=n;return(0,p.kt)(o,(0,t.Z)({},_,a,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc124\uc815\ud560 \uc218 \uc788\ub294 \uac12\ub4e4\uc5d0 \ub300\ud574 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"display-name"},"Display Name"),(0,p.kt)("p",null,"\uc0dd\uc131\ub41c \ud30c\uc774\ud504\ub77c\uc778 \ub0b4\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub450 \uac1c\uc758 \uc774\ub984\uc744 \uac16\uc2b5\ub2c8\ub2e4."),(0,p.kt)("ul",null,(0,p.kt)("li",{parentName:"ul"},"task_name: \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud560 \ub54c \uc791\uc131\ud55c \ud568\uc218 \uc774\ub984"),(0,p.kt)("li",{parentName:"ul"},"display_name: kubeflow UI\uc0c1\uc5d0 \ubcf4\uc774\ub294 \uc774\ub984")),(0,p.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uacbd\uc6b0 \ub450 \ucef4\ud3ec\ub10c\ud2b8 \ubaa8\ub450 Print and return number\ub85c \uc124\uc815\ub418\uc5b4 \uc788\uc5b4\uc11c \uc5b4\ub5a4 \ucef4\ud3ec\ub10c\ud2b8\uac00 1\ubc88\uc778\uc9c0 2\ubc88\uc778\uc9c0 \ud655\uc778\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"run-7",src:r(2687).Z,width:"3408",height:"2156"})),(0,p.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,p.kt)("p",null,"\uc774\ub97c \uc704\ud55c \uac83\uc774 \ubc14\ub85c display_name \uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc124\uc815\ud558\ub294 \ubc29\ubc95\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_display_name")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute"),"\ub97c \uc774\uc6a9\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 
",(0,p.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", 
{"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n 
metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n 
argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc774 \uc804\uc758 \ud30c\uc77c\uacfc \ube44\uad50\ud558\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"pipelines.kubeflow.org/task_display_name")," key\uac00 \uc0c8\ub85c \uc0dd\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,p.kt)("p",null,"\uc704\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc774\uc804\uc5d0 \uc0dd\uc131\ud55c ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc758 \ubc84\uc804\uc744 \uc62c\ub9ac\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"adv-pipeline-0.png",src:r(139).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub7ec\uba74 \uc704\uc640 \uac19\uc774 \uc124\uc815\ud55c \uc774\ub984\uc774 \ub178\ucd9c\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"resources"},"Resources"),(0,p.kt)("h3",{id:"gpu"},"GPU"),(0,p.kt)("p",null,"\ud2b9\ubcc4\ud55c \uc124\uc815\uc774 \uc5c6\ub2e4\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud30c\ub4dc(pod)\ub85c \uc2e4\ud589\ud560 \ub54c, \uae30\ubcf8 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc73c\ub85c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d GPU\ub97c \uc0ac\uc6a9\ud574 \ubaa8\ub378\uc744 \ud559\uc2b5\ud574\uc57c \ud560 \ub54c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc0c1\uc5d0\uc11c GPU\ub97c \ud560\ub2f9\ubc1b\uc9c0 \ubabb\ud574 \uc81c\ub300\ub85c \ud559\uc2b5\uc774 \uc774\ub8e8\uc5b4\uc9c0\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n 
print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc0dd\uc131\ub41c \ud30c\uc77c\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),"\ub97c \uc790\uc138\ud788 \ubcf4\uba74 resources\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," \ub3c4 \ucd94\uac00\ub41c \uac83\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \ud1b5\ud574 GPU\ub97c \ud560\ub2f9\ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,p.kt)("h3",{id:"cpu"},"CPU"),(0,p.kt)("p",null,"cpu\uc758 \uac1c\uc218\ub97c \uc815\ud558\uae30 \uc704\ud574\uc11c \uc774\uc6a9\ud558\ub294 \ud568\uc218\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","gpu\uc640\ub294 \ub2e4\ub978 \uc810\uc740 int\uac00 \uc544\ub2cc string\uc73c\ub85c \uc785\ub825\ud574\uc57c \ud55c\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n 
).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,p.kt)("h3",{id:"memory"},"Memory"),(0,p.kt)("p",null,"\uba54\ubaa8\ub9ac\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},139:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},2687:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8837],{3905:(n,e,r)=>{r.d(e,{Zo:()=>_,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function a(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function u(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):u(u({},e),n)),r},_=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},o="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var 
r=n.components,p=n.mdxType,a=n.originalType,s=n.parentName,_=i(n,["components","mdxType","originalType","parentName"]),o=m(r),d=p,b=o["".concat(s,".").concat(d)]||o[d]||l[d]||a;return r?t.createElement(b,u(u({ref:e},_),{},{components:r})):t.createElement(b,u({ref:e},_))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var a=r.length,u=new Array(a);u[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[o]="string"==typeof n?n:p,u[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>a,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const a={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"version-1.0/kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/docs/1.0/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/docs/1.0/kubeflow/advanced-environment"},next:{title:"11. Pipeline - Run Result",permalink:"/docs/1.0/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],_={toc:m},o="wrapper";function l(n){let{components:e,...a}=n;return(0,p.kt)(o,(0,t.Z)({},_,a,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc124\uc815\ud560 \uc218 \uc788\ub294 \uac12\ub4e4\uc5d0 \ub300\ud574 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"display-name"},"Display Name"),(0,p.kt)("p",null,"\uc0dd\uc131\ub41c \ud30c\uc774\ud504\ub77c\uc778 \ub0b4\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub450 \uac1c\uc758 \uc774\ub984\uc744 \uac16\uc2b5\ub2c8\ub2e4."),(0,p.kt)("ul",null,(0,p.kt)("li",{parentName:"ul"},"task_name: \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud560 \ub54c \uc791\uc131\ud55c \ud568\uc218 \uc774\ub984"),(0,p.kt)("li",{parentName:"ul"},"display_name: kubeflow UI\uc0c1\uc5d0 \ubcf4\uc774\ub294 \uc774\ub984")),(0,p.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uacbd\uc6b0 \ub450 \ucef4\ud3ec\ub10c\ud2b8 \ubaa8\ub450 Print and return number\ub85c \uc124\uc815\ub418\uc5b4 \uc788\uc5b4\uc11c \uc5b4\ub5a4 \ucef4\ud3ec\ub10c\ud2b8\uac00 1\ubc88\uc778\uc9c0 2\ubc88\uc778\uc9c0 \ud655\uc778\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"run-7",src:r(2687).Z,width:"3408",height:"2156"})),(0,p.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,p.kt)("p",null,"\uc774\ub97c \uc704\ud55c \uac83\uc774 
\ubc14\ub85c display_name \uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc124\uc815\ud558\ub294 \ubc29\ubc95\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_display_name")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute"),"\ub97c \uc774\uc6a9\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u 
"$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, 
\'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n 
pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc774 \uc804\uc758 \ud30c\uc77c\uacfc \ube44\uad50\ud558\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"pipelines.kubeflow.org/task_display_name")," key\uac00 \uc0c8\ub85c \uc0dd\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,p.kt)("p",null,"\uc704\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc774\uc804\uc5d0 \uc0dd\uc131\ud55c ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc758 \ubc84\uc804\uc744 \uc62c\ub9ac\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"adv-pipeline-0.png",src:r(139).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub7ec\uba74 \uc704\uc640 \uac19\uc774 \uc124\uc815\ud55c 
\uc774\ub984\uc774 \ub178\ucd9c\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"resources"},"Resources"),(0,p.kt)("h3",{id:"gpu"},"GPU"),(0,p.kt)("p",null,"\ud2b9\ubcc4\ud55c \uc124\uc815\uc774 \uc5c6\ub2e4\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud30c\ub4dc(pod)\ub85c \uc2e4\ud589\ud560 \ub54c, \uae30\ubcf8 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc73c\ub85c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d GPU\ub97c \uc0ac\uc6a9\ud574 \ubaa8\ub378\uc744 \ud559\uc2b5\ud574\uc57c \ud560 \ub54c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc0c1\uc5d0\uc11c GPU\ub97c \ud560\ub2f9\ubc1b\uc9c0 \ubabb\ud574 \uc81c\ub300\ub85c \ud559\uc2b5\uc774 \uc774\ub8e8\uc5b4\uc9c0\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc0dd\uc131\ub41c \ud30c\uc77c\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),"\ub97c \uc790\uc138\ud788 \ubcf4\uba74 resources\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," \ub3c4 \ucd94\uac00\ub41c \uac83\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \ud1b5\ud574 GPU\ub97c \ud560\ub2f9\ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = 
vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,p.kt)("h3",{id:"cpu"},"CPU"),(0,p.kt)("p",null,"cpu\uc758 \uac1c\uc218\ub97c \uc815\ud558\uae30 \uc704\ud574\uc11c \uc774\uc6a9\ud558\ub294 \ud568\uc218\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","gpu\uc640\ub294 \ub2e4\ub978 \uc810\uc740 int\uac00 \uc544\ub2cc string\uc73c\ub85c \uc785\ub825\ud574\uc57c \ud55c\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,p.kt)("h3",{id:"memory"},"Memory"),(0,p.kt)("p",null,"\uba54\ubaa8\ub9ac\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,p.kt)("p",null,"\ubc14\ub010 
\ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},139:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},2687:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file diff --git a/assets/js/3f2d0791.1dcf3050.js b/assets/js/3f2d0791.f960958f.js similarity index 99% rename from assets/js/3f2d0791.1dcf3050.js rename to assets/js/3f2d0791.f960958f.js index 03a420f7..fa54fbfa 100644 --- a/assets/js/3f2d0791.1dcf3050.js +++ b/assets/js/3f2d0791.f960958f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5435],{3905:(e,t,r)=>{r.d(t,{Zo:()=>k,kt:()=>s});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),c=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},k=function(e){var t=c(e.components);return n.createElement(p.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),m=c(r),u=a,s=m["".concat(p,".").concat(u)]||m[u]||d[u]||l;return r?n.createElement(s,o(o({ref:t},k),{},{components:r})):n.createElement(s,o({ref:t},k))}));function s(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[m]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const l={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/images",id:"prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/docs/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/docs/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/images.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/docs/prerequisites/docker/command"},next:{title:"[Practice] 
Docker Advanced",permalink:"/docs/prerequisites/docker/advanced"}},p={},c=[{value:"1. Dockerfile \ub9cc\ub4e4\uae30",id:"1-dockerfile-\ub9cc\ub4e4\uae30",level:2},{value:"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4",id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4",level:2},{value:"FROM",id:"from",level:3},{value:"COPY",id:"copy",level:3},{value:"RUN",id:"run",level:3},{value:"CMD",id:"cmd",level:3},{value:"WORKDIR",id:"workdir",level:3},{value:"ENV",id:"env",level:3},{value:"EXPOSE",id:"expose",level:3},{value:"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30",id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30",level:2},{value:"4. Docker build from Dockerfile",id:"4-docker-build-from-dockerfile",level:2},{value:"5. Docker run from Dockerfile",id:"5-docker-run-from-dockerfile",level:2},{value:"6. Docker run with env",id:"6-docker-run-with-env",level:2}],k={toc:c},m="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},k,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-dockerfile-\ub9cc\ub4e4\uae30"},"1. Dockerfile \ub9cc\ub4e4\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 \uac00\uc7a5 \uc26c\uc6b4 \ubc29\ubc95\uc740 \ub3c4\ucee4\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf\uc778 Dockerfile\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc678\uc5d0\ub294 running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker commit")," \ub4f1\uc744 \ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\uc790\uac00 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc27d\uac8c \ub9cc\ub4e4 \uc218 \uc788\ub3c4\ub85d, \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf"),(0,a.kt)("li",{parentName:"ul"},"\ud30c\uc77c\uba85\uc740 \uaf2d ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc774 \uc544\ub2c8\uc5b4\ub3c4 \uc0c1\uad00\uc5c6\uc9c0\ub9cc, ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \uc218\ud589 \uc2dc, default \ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc77c\uba85\uc774 ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ul"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \ub97c \uc218\ud589\ud560 \ub54c, ",(0,a.kt)("inlineCode",{parentName:"li"},"-f")," \uc635\uc158\uc744 \uc8fc\uba74 \ub2e4\ub978 \ud30c\uc77c\uba85\uc73c\ub85c\ub3c4 \uc0ac\uc6a9 \uac00\ub2a5\ud569\ub2c8\ub2e4.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"ex) ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build -f dockerfile-asdf .")," \ub3c4 \uac00\ub2a5")))))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc2e4\uc2b5\uc744 \uc704\ud574\uc11c \ud3b8\ud55c \ub514\ub809\ud1a0\ub9ac\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd \n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ub77c\ub294 \uc774\ub984\uc758 \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir 
docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Dockerfile \uc774\ub77c\ub294 \ube48 \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"touch Dockerfile\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")))),(0,a.kt)("h2",{id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4"},"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4"),(0,a.kt)("p",null,"Dockerfile \uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8\uc801\uc778 \uba85\ub839\uc5b4\uc5d0 \ub300\ud574\uc11c \ud558\ub098\uc529 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"from"},"FROM"),(0,a.kt)("p",null,"Dockerfile \uc774 base image \ub85c \uc5b4\ub5a0\ud55c \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c, \uc544\ubb34\uac83\ub3c4 \uc5c6\ub294 \ube48 \ud658\uacbd\uc5d0\uc11c\ubd80\ud130 \ud558\ub098\ud558\ub098\uc529 \uc81c\uac00 \uc758\ub3c4\ud55c \ud658\uacbd\uc744 \ub9cc\ub4e4\uc5b4\uac00\ub294\uac8c \uc544\ub2c8\ub77c, python 3.9 \ubc84\uc804\uc774 \uc124\uce58\ub41c \ud658\uacbd\uc744 \ubca0\uc774\uc2a4\ub85c\ud574\ub450\uace0, \uc800\ub294 pytorch \ub97c \uc124\uce58\ud558\uace0, \uc81c \uc18c\uc2a4\ucf54\ub4dc\ub9cc \ub123\uc5b4\ub450\ub294 \ud615\ud0dc\ub85c \ud65c\uc6a9\ud560 \uc218\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \uacbd\uc6b0\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", ... \ub4f1\uc758 \uc798 \ub9cc\ub4e4\uc5b4\uc9c4 \uc774\ubbf8\uc9c0\ub97c \ubca0\uc774\uc2a4\ub85c \ud65c\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,a.kt)("h3",{id:"copy"},"COPY"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"host(\ub85c\uceec)\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc758 \ud30c\uc77c \ud639\uc740 \ub514\ub809\ud1a0\ub9ac\ub97c ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc5d0 \ubcf5\uc0ac\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ADD")," \ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uc640 \ube44\uc2b7\ud558\uc9c0\ub9cc \ucd94\uac00\uc801\uc778 \uae30\ub2a5\uc744 \ud488\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,a.kt)("h3",{id:"run"},"RUN"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \ud574\ub2f9 \ucee4\ub9e8\ub4dc\ub4e4\uc774 \uc2e4\ud589\ub41c \uc0c1\ud0dc\ub97c \uc720\uc9c0\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,a.kt)("h3",{id:"cmd"},"CMD"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 ",(0,a.kt)("strong",{parentName:"p"},"\uc2dc\uc791\ub420 \ub54c"),", \uc2e4\ud589\ud558\ub294 \uac83\uc744 \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud558\ub294 \uba85\ub839\uc5b4\ub85c ",(0,a.kt)("strong",{parentName:"p"},"ENTRYPOINT")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub458\uc758 \ucc28\uc774\uc5d0 \ub300\ud574\uc11c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ub4a4\uc5d0\uc11c")," \ub2e4\ub8f9\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\ub098\uc758 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc5d0\uc11c\ub294 \ud558\ub098\uc758 ",(0,a.kt)("strong",{parentName:"p"},"CMD")," \ub9cc \uc2e4\ud589\ud560 \uc218 \uc788\ub2e4\ub294 \uc810\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"RUN")," \uba85\ub839\uc5b4\uc640 \ub2e4\ub985\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,a.kt)("h3",{id:"workdir"},"WORKDIR"),(0,a.kt)("p",null,"\uc774\ud6c4 \ucd94\uac00\ub420 \uba85\ub839\uc5b4\ub97c \ucee8\ud14c\uc774\ub108 \ub0b4\uc758 \uc5b4\ub5a4 \ub514\ub809\ud1a0\ub9ac\uc5d0\uc11c \uc218\ud589\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud574\ub2f9 \ub514\ub809\ud1a0\ub9ac\uac00 \uc5c6\ub2e4\uba74 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,a.kt)("h3",{id:"env"},"ENV"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc9c0\uc18d\uc801\uc73c\ub85c \uc0ac\uc6a9\ub420 environment variable \uc758 \uac12\uc744 \uc124\uc815\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,a.kt)("h3",{id:"expose"},"EXPOSE"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub6ab\uc5b4\uc904 \ud3ec\ud2b8/\ud504\ub85c\ud1a0\ucf5c\uc744 \uc9c0\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("inlineCode",{parentName:"p"},"")," \uc744 \uc9c0\uc815\ud558\uc9c0 \uc54a\uc73c\uba74 TCP \uac00 \ub514\ud3f4\ud2b8\ub85c \uc124\uc815\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,a.kt)("h2",{id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30"},"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," \ud639\uc740 vscode \ub4f1 \ubcf8\uc778\uc774 \uc0ac\uc6a9\ud558\ub294 \ud3b8\uc9d1\uae30\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,a.kt)("h2",{id:"4-docker-build-from-dockerfile"},"4. 
Docker build from Dockerfile"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker build")," \uba85\ub839\uc5b4\ub85c Dockerfile \ub85c\ubd80\ud130 Docker Image \ub97c \ub9cc\ub4e4\uc5b4\ubd05\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,a.kt)("p",null,"Dockerfile \uc774 \uc788\ub294 \uacbd\ub85c\uc5d0\uc11c \ub2e4\uc74c \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,a.kt)("p",null,"\uc704 \ucee4\ub9e8\ub4dc\ub97c \uc124\uba85\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},".")," : ",(0,a.kt)("strong",{parentName:"li"},"\ud604\uc7ac \uacbd\ub85c"),"\uc5d0 \uc788\ub294 Dockerfile \ub85c\ubd80\ud130"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"-t")," : my-image \ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ub984"),"\uacfc v1.0.0 \uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\ud0dc\uadf8"),"\ub85c ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ubbf8\uc9c0"),"\ub97c"),(0,a.kt)("li",{parentName:"ul"},"\ube4c\ub4dc\ud558\uaca0\ub2e4\ub77c\ub294 \uba85\ub839\uc5b4")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc774\ubbf8\uc9c0 \ube4c\ub4dc\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,a.kt)("h2",{id:"5-docker-run-from-dockerfile"},"5. Docker run from Dockerfile"),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub85c docker \ucee8\ud14c\uc774\ub108\ub97c ",(0,a.kt)("strong",{parentName:"p"},"run")," \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,a.kt)("h2",{id:"6-docker-run-with-env"},"6. 
Docker run with env"),(0,a.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub97c \uc2e4\ud589\ud558\ub294 \uc2dc\uc810\uc5d0, ",(0,a.kt)("inlineCode",{parentName:"p"},"TEST")," env var \uc758 \uac12\uc744 \ubcc0\uacbd\ud558\uc5ec docker \ucee8\ud14c\uc774\ub108\ub97c run \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5435],{3905:(e,t,r)=>{r.d(t,{Zo:()=>k,kt:()=>s});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),c=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},k=function(e){var t=c(e.components);return n.createElement(p.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),m=c(r),u=a,s=m["".concat(p,".").concat(u)]||m[u]||d[u]||l;return r?n.createElement(s,o(o({ref:t},k),{},{components:r})):n.createElement(s,o({ref:t},k))}));function s(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[m]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const l={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/images",id:"prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/docs/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/docs/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/images.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker 
command",permalink:"/docs/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/docs/prerequisites/docker/advanced"}},p={},c=[{value:"1. Dockerfile \ub9cc\ub4e4\uae30",id:"1-dockerfile-\ub9cc\ub4e4\uae30",level:2},{value:"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4",id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4",level:2},{value:"FROM",id:"from",level:3},{value:"COPY",id:"copy",level:3},{value:"RUN",id:"run",level:3},{value:"CMD",id:"cmd",level:3},{value:"WORKDIR",id:"workdir",level:3},{value:"ENV",id:"env",level:3},{value:"EXPOSE",id:"expose",level:3},{value:"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30",id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30",level:2},{value:"4. Docker build from Dockerfile",id:"4-docker-build-from-dockerfile",level:2},{value:"5. Docker run from Dockerfile",id:"5-docker-run-from-dockerfile",level:2},{value:"6. Docker run with env",id:"6-docker-run-with-env",level:2}],k={toc:c},m="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},k,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-dockerfile-\ub9cc\ub4e4\uae30"},"1. Dockerfile \ub9cc\ub4e4\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 \uac00\uc7a5 \uc26c\uc6b4 \ubc29\ubc95\uc740 \ub3c4\ucee4\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf\uc778 Dockerfile\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc678\uc5d0\ub294 running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker commit")," \ub4f1\uc744 \ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\uc790\uac00 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc27d\uac8c \ub9cc\ub4e4 \uc218 \uc788\ub3c4\ub85d, \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf"),(0,a.kt)("li",{parentName:"ul"},"\ud30c\uc77c\uba85\uc740 \uaf2d ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc774 \uc544\ub2c8\uc5b4\ub3c4 \uc0c1\uad00\uc5c6\uc9c0\ub9cc, ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \uc218\ud589 \uc2dc, default \ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc77c\uba85\uc774 ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ul"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \ub97c \uc218\ud589\ud560 \ub54c, ",(0,a.kt)("inlineCode",{parentName:"li"},"-f")," \uc635\uc158\uc744 \uc8fc\uba74 \ub2e4\ub978 \ud30c\uc77c\uba85\uc73c\ub85c\ub3c4 \uc0ac\uc6a9 \uac00\ub2a5\ud569\ub2c8\ub2e4.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"ex) ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build -f dockerfile-asdf .")," \ub3c4 \uac00\ub2a5")))))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc2e4\uc2b5\uc744 \uc704\ud574\uc11c \ud3b8\ud55c \ub514\ub809\ud1a0\ub9ac\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd \n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ub77c\ub294 \uc774\ub984\uc758 \ud3f4\ub354\ub97c 
\uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Dockerfile \uc774\ub77c\ub294 \ube48 \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"touch Dockerfile\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")))),(0,a.kt)("h2",{id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4"},"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4"),(0,a.kt)("p",null,"Dockerfile \uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8\uc801\uc778 \uba85\ub839\uc5b4\uc5d0 \ub300\ud574\uc11c \ud558\ub098\uc529 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"from"},"FROM"),(0,a.kt)("p",null,"Dockerfile \uc774 base image \ub85c \uc5b4\ub5a0\ud55c \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c, \uc544\ubb34\uac83\ub3c4 \uc5c6\ub294 \ube48 \ud658\uacbd\uc5d0\uc11c\ubd80\ud130 \ud558\ub098\ud558\ub098\uc529 \uc81c\uac00 \uc758\ub3c4\ud55c \ud658\uacbd\uc744 \ub9cc\ub4e4\uc5b4\uac00\ub294\uac8c \uc544\ub2c8\ub77c, python 3.9 \ubc84\uc804\uc774 \uc124\uce58\ub41c \ud658\uacbd\uc744 \ubca0\uc774\uc2a4\ub85c\ud574\ub450\uace0, \uc800\ub294 pytorch \ub97c \uc124\uce58\ud558\uace0, \uc81c \uc18c\uc2a4\ucf54\ub4dc\ub9cc \ub123\uc5b4\ub450\ub294 \ud615\ud0dc\ub85c \ud65c\uc6a9\ud560 \uc218\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \uacbd\uc6b0\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", ... \ub4f1\uc758 \uc798 \ub9cc\ub4e4\uc5b4\uc9c4 \uc774\ubbf8\uc9c0\ub97c \ubca0\uc774\uc2a4\ub85c \ud65c\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,a.kt)("h3",{id:"copy"},"COPY"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"host(\ub85c\uceec)\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc758 \ud30c\uc77c \ud639\uc740 \ub514\ub809\ud1a0\ub9ac\ub97c ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc5d0 \ubcf5\uc0ac\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ADD")," \ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uc640 \ube44\uc2b7\ud558\uc9c0\ub9cc \ucd94\uac00\uc801\uc778 \uae30\ub2a5\uc744 \ud488\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,a.kt)("h3",{id:"run"},"RUN"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \ud574\ub2f9 \ucee4\ub9e8\ub4dc\ub4e4\uc774 \uc2e4\ud589\ub41c \uc0c1\ud0dc\ub97c \uc720\uc9c0\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,a.kt)("h3",{id:"cmd"},"CMD"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 ",(0,a.kt)("strong",{parentName:"p"},"\uc2dc\uc791\ub420 \ub54c"),", \uc2e4\ud589\ud558\ub294 \uac83\uc744 \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud558\ub294 \uba85\ub839\uc5b4\ub85c ",(0,a.kt)("strong",{parentName:"p"},"ENTRYPOINT")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub458\uc758 \ucc28\uc774\uc5d0 \ub300\ud574\uc11c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ub4a4\uc5d0\uc11c")," \ub2e4\ub8f9\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\ub098\uc758 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc5d0\uc11c\ub294 \ud558\ub098\uc758 ",(0,a.kt)("strong",{parentName:"p"},"CMD")," \ub9cc \uc2e4\ud589\ud560 \uc218 \uc788\ub2e4\ub294 \uc810\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"RUN")," \uba85\ub839\uc5b4\uc640 \ub2e4\ub985\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,a.kt)("h3",{id:"workdir"},"WORKDIR"),(0,a.kt)("p",null,"\uc774\ud6c4 \ucd94\uac00\ub420 \uba85\ub839\uc5b4\ub97c \ucee8\ud14c\uc774\ub108 \ub0b4\uc758 \uc5b4\ub5a4 \ub514\ub809\ud1a0\ub9ac\uc5d0\uc11c \uc218\ud589\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud574\ub2f9 \ub514\ub809\ud1a0\ub9ac\uac00 \uc5c6\ub2e4\uba74 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,a.kt)("h3",{id:"env"},"ENV"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc9c0\uc18d\uc801\uc73c\ub85c \uc0ac\uc6a9\ub420 environment variable \uc758 \uac12\uc744 \uc124\uc815\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,a.kt)("h3",{id:"expose"},"EXPOSE"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub6ab\uc5b4\uc904 \ud3ec\ud2b8/\ud504\ub85c\ud1a0\ucf5c\uc744 \uc9c0\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("inlineCode",{parentName:"p"},"")," \uc744 \uc9c0\uc815\ud558\uc9c0 \uc54a\uc73c\uba74 TCP \uac00 \ub514\ud3f4\ud2b8\ub85c \uc124\uc815\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,a.kt)("h2",{id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30"},"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," \ud639\uc740 vscode \ub4f1 \ubcf8\uc778\uc774 \uc0ac\uc6a9\ud558\ub294 \ud3b8\uc9d1\uae30\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,a.kt)("h2",{id:"4-docker-build-from-dockerfile"},"4. 
Docker build from Dockerfile"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker build")," \uba85\ub839\uc5b4\ub85c Dockerfile \ub85c\ubd80\ud130 Docker Image \ub97c \ub9cc\ub4e4\uc5b4\ubd05\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,a.kt)("p",null,"Dockerfile \uc774 \uc788\ub294 \uacbd\ub85c\uc5d0\uc11c \ub2e4\uc74c \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,a.kt)("p",null,"\uc704 \ucee4\ub9e8\ub4dc\ub97c \uc124\uba85\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},".")," : ",(0,a.kt)("strong",{parentName:"li"},"\ud604\uc7ac \uacbd\ub85c"),"\uc5d0 \uc788\ub294 Dockerfile \ub85c\ubd80\ud130"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"-t")," : my-image \ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ub984"),"\uacfc v1.0.0 \uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\ud0dc\uadf8"),"\ub85c ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ubbf8\uc9c0"),"\ub97c"),(0,a.kt)("li",{parentName:"ul"},"\ube4c\ub4dc\ud558\uaca0\ub2e4\ub77c\ub294 \uba85\ub839\uc5b4")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc774\ubbf8\uc9c0 \ube4c\ub4dc\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,a.kt)("h2",{id:"5-docker-run-from-dockerfile"},"5. Docker run from Dockerfile"),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub85c docker \ucee8\ud14c\uc774\ub108\ub97c ",(0,a.kt)("strong",{parentName:"p"},"run")," \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,a.kt)("h2",{id:"6-docker-run-with-env"},"6. 
Docker run with env"),(0,a.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub97c \uc2e4\ud589\ud558\ub294 \uc2dc\uc810\uc5d0, ",(0,a.kt)("inlineCode",{parentName:"p"},"TEST")," env var \uc758 \uac12\uc744 \ubcc0\uacbd\ud558\uc5ec docker \ucee8\ud14c\uc774\ub108\ub97c run \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/44d64813.26178d88.js b/assets/js/44d64813.89e8eda4.js similarity index 99% rename from assets/js/44d64813.26178d88.js rename to assets/js/44d64813.89e8eda4.js index b4c02e6a..823a4688 100644 --- a/assets/js/44d64813.26178d88.js +++ b/assets/js/44d64813.89e8eda4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4023],{3905:(n,e,t)=>{t.d(e,{Zo:()=>p,kt:()=>_});var a=t(7294);function l(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function r(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(l[t]=n[t]);return l}(n,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(l[t]=n[t])}return l}var s=a.createContext({}),m=function(n){var e=a.useContext(s),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},p=function(n){var e=m(n.components);return a.createElement(s.Provider,{value:e},n.children)},d="mdxType",c={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,l=n.mdxType,r=n.originalType,s=n.parentName,p=o(n,["components","mdxType","originalType","parentName"]),d=m(t),u=l,_=d["".concat(s,".").concat(u)]||d[u]||c[u]||r;return t?a.createElement(_,i(i({ref:e},p),{},{components:t})):a.createElement(_,i({ref:e},p))}));function _(n,e){var t=arguments,l=e&&e.mdxType;if("string"==typeof n||l){var r=t.length,i=new Array(r);i[0]=u;var o={};for(var s in e)hasOwnProperty.call(e,s)&&(o[s]=e[s]);o.originalType=n,o[d]="string"==typeof n?n:l,i[1]=o;for(var m=2;m{t.r(e),t.d(e,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},i=void 0,o={unversionedId:"api-deployment/seldon-children",id:"api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/docs/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/docs/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-children.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. 
Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/docs/api-deployment/seldon-mlflow"},next:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/appendix/pyenv"}},s={},m=[{value:"Multi Models",id:"multi-models",level:2},{value:"Pipeline",id:"pipeline",level:2}],p={toc:m},d="wrapper";function c(n){let{components:e,...r}=n;return(0,l.kt)(d,(0,a.Z)({},p,r,{components:e,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"multi-models"},"Multi Models"),(0,l.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud588\ub358 \ubc29\ubc95\ub4e4\uc740 \ubaa8\ub450 \ub2e8\uc77c \ubaa8\ub378\uc744 \ub300\uc0c1\uc73c\ub85c \ud588\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc5ec\ub7ec \uac1c\uc758 \ubaa8\ub378\uc744 \uc5f0\uacb0\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\uc6b0\uc120 \ubaa8\ub378\uc744 2\uac1c\ub97c \uc0dd\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc740 \uc55e\uc11c \uc0ac\uc6a9\ud55c SVC \ubaa8\ub378\uc5d0 StandardScaler\ub97c \ucd94\uac00\ud558\uace0 \uc800\uc7a5\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", 
"mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 
\uc5c5\ub85c\ub4dc\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(9023).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"MLflow \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ubaa8\ub378\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(4483).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"\uac01\uac01\uc758 run_id\ub97c \ud655\uc778 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc774 SeldonDeployment \uc2a4\ud399\uc744 \uc815\uc758\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \ub450 \uac1c\uac00 \ub418\uc5c8\uc73c\ubbc0\ub85c \uac01 \ubaa8\ub378\uc758 initContainer\uc640 container\ub97c \uc815\uc758\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774 \ud544\ub4dc\ub294 \uc785\ub825\uac12\uc744 array\ub85c \ubc1b\uc73c\uba70 \uc21c\uc11c\ub294 \uad00\uacc4\uc5c6\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \uc2e4\ud589\ud558\ub294 \uc21c\uc11c\ub294 graph\uc5d0\uc11c \uc815\uc758\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"graph\uc758 \ub3d9\uc791 \ubc29\uc2dd\uc740 \ucc98\uc74c \ubc1b\uc740 \uac12\uc744 \uc815\ud574\uc9c4 predict_method\ub85c \ubcc0\ud658\ud55c \ub4a4 children\uc73c\ub85c \uc815\uc758\ub41c \ubaa8\ub378\uc5d0 
\uc804\ub2ec\ud558\ub294 \ubc29\uc2dd\uc785\ub2c8\ub2e4.\n\uc774 \uacbd\uc6b0 scaler -> svc \ub85c \ub370\uc774\ud130\uac00 \uc804\ub2ec\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\uc81c \uc704\uc758 \uc2a4\ud399\uc744 yaml\ud30c\uc77c\ub85c \uc0dd\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 API\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub410\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c pod\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},9023:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},4483:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file +"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4023],{3905:(n,e,t)=>{t.d(e,{Zo:()=>p,kt:()=>_});var a=t(7294);function l(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function r(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(l[t]=n[t]);return l}(n,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(l[t]=n[t])}return l}var s=a.createContext({}),m=function(n){var e=a.useContext(s),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},p=function(n){var e=m(n.components);return a.createElement(s.Provider,{value:e},n.children)},d="mdxType",c={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,l=n.mdxType,r=n.originalType,s=n.parentName,p=o(n,["components","mdxType","originalType","parentName"]),d=m(t),u=l,_=d["".concat(s,".").concat(u)]||d[u]||c[u]||r;return t?a.createElement(_,i(i({ref:e},p),{},{components:t})):a.createElement(_,i({ref:e},p))}));function _(n,e){var t=arguments,l=e&&e.mdxType;if("string"==typeof n||l){var r=t.length,i=new Array(r);i[0]=u;var o={};for(var s in e)hasOwnProperty.call(e,s)&&(o[s]=e[s]);o.originalType=n,o[d]="string"==typeof n?n:l,i[1]=o;for(var m=2;m{t.r(e),t.d(e,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},i=void 0,o={unversionedId:"api-deployment/seldon-children",id:"api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/docs/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/docs/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-children.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/docs/api-deployment/seldon-mlflow"},next:{title:"1. 
Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/appendix/pyenv"}},s={},m=[{value:"Multi Models",id:"multi-models",level:2},{value:"Pipeline",id:"pipeline",level:2}],p={toc:m},d="wrapper";function c(n){let{components:e,...r}=n;return(0,l.kt)(d,(0,a.Z)({},p,r,{components:e,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"multi-models"},"Multi Models"),(0,l.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud588\ub358 \ubc29\ubc95\ub4e4\uc740 \ubaa8\ub450 \ub2e8\uc77c \ubaa8\ub378\uc744 \ub300\uc0c1\uc73c\ub85c \ud588\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc5ec\ub7ec \uac1c\uc758 \ubaa8\ub378\uc744 \uc5f0\uacb0\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\uc6b0\uc120 \ubaa8\ub378\uc744 2\uac1c\ub97c \uc0dd\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc740 \uc55e\uc11c \uc0ac\uc6a9\ud55c SVC \ubaa8\ub378\uc5d0 StandardScaler\ub97c \ucd94\uac00\ud558\uace0 \uc800\uc7a5\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: 
OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(9023).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"MLflow \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ubaa8\ub378\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(4483).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"\uac01\uac01\uc758 run_id\ub97c \ud655\uc778 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc774 SeldonDeployment \uc2a4\ud399\uc744 \uc815\uc758\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \ub450 \uac1c\uac00 \ub418\uc5c8\uc73c\ubbc0\ub85c \uac01 \ubaa8\ub378\uc758 initContainer\uc640 container\ub97c \uc815\uc758\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774 \ud544\ub4dc\ub294 \uc785\ub825\uac12\uc744 array\ub85c \ubc1b\uc73c\uba70 \uc21c\uc11c\ub294 \uad00\uacc4\uc5c6\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \uc2e4\ud589\ud558\ub294 \uc21c\uc11c\ub294 graph\uc5d0\uc11c \uc815\uc758\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"graph\uc758 \ub3d9\uc791 \ubc29\uc2dd\uc740 \ucc98\uc74c \ubc1b\uc740 \uac12\uc744 \uc815\ud574\uc9c4 predict_method\ub85c \ubcc0\ud658\ud55c \ub4a4 children\uc73c\ub85c \uc815\uc758\ub41c \ubaa8\ub378\uc5d0 \uc804\ub2ec\ud558\ub294 \ubc29\uc2dd\uc785\ub2c8\ub2e4.\n\uc774 \uacbd\uc6b0 scaler 
-> svc \ub85c \ub370\uc774\ud130\uac00 \uc804\ub2ec\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\uc81c \uc704\uc758 \uc2a4\ud399\uc744 yaml\ud30c\uc77c\ub85c \uc0dd\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 API\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub410\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c pod\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},9023:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},4483:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file diff --git a/assets/js/4579a793.052cd2c0.js b/assets/js/4579a793.91381a61.js similarity index 99% rename 
from assets/js/4579a793.052cd2c0.js rename to assets/js/4579a793.91381a61.js index fff17cf2..05608329 100644 --- a/assets/js/4579a793.052cd2c0.js +++ b/assets/js/4579a793.91381a61.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8737],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>c});var a=t(7294);function p(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(p[t]=e[t]);return p}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(p[t]=e[t])}return p}var o=a.createContext({}),u=function(e){var n=a.useContext(o),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},m=function(e){var n=u(e.components);return a.createElement(o.Provider,{value:n},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,p=e.mdxType,r=e.originalType,o=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=u(t),_=p,c=s["".concat(o,".").concat(_)]||s[_]||d[_]||r;return t?a.createElement(c,i(i({ref:n},m),{},{components:t})):a.createElement(c,i({ref:n},m))}));function c(e,n){var t=arguments,p=n&&n.mdxType;if("string"==typeof e||p){var r=t.length,i=new Array(r);i[0]=_;var l={};for(var o in n)hasOwnProperty.call(n,o)&&(l[o]=n[o]);l.originalType=e,l[s]="string"==typeof e?e:p,i[1]=l;for(var u=2;u{t.r(n),t.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>u});var a=t(7462),p=(t(7294),t(3905));const r={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow/advanced-run",id:"kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/docs/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/docs/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/docs/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/docs/kubeflow/advanced-mlflow"}},o={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],m={toc:u},s="wrapper";function d(e){let{components:n,...r}=e;return(0,p.kt)(s,(0,a.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"run-result"},"Run Result"),(0,p.kt)("p",null,"Run \uc2e4\ud589 \uacb0\uacfc\ub97c \ub20c\ub7ec\ubcf4\uba74 3\uac1c\uc758 \ud0ed\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4.\n\uac01\uac01 Graph, Run output, Config \uc785\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-0.png",src:t(57).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"graph"},"Graph"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-1.png",src:t(7834).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub798\ud504\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub204\ub974\uba74 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,p.kt)("p",null,"Input/Output \ud0ed\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud55c Config\ub4e4\uacfc Input, Output Artifacts\ub97c \ud655\uc778\ud558\uace0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"logs"},"Logs"),(0,p.kt)("p",null,"Logs\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc \uc2e4\ud589 \uc911 \ub098\uc624\ub294 \ubaa8\ub4e0 stdout\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\ub2e4\ub9cc pod\uc740 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub09c \ud6c4 \uc9c0\uc6cc\uc9c0\uae30 \ub54c\ubb38\uc5d0 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub098\uba74 \uc774 \ud0ed\uc5d0\uc11c\ub294 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.\n\uc774\ub54c\ub294 Output artifacts\uc758 main-logs\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"visualizations"},"Visualizations"),(0,p.kt)("p",null,"Visualizations\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0dd\uc131\ub41c \ud50c\ub78f\uc744 \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud50c\ub78f\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")')," argument\ub85c \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uac12\uc744 \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4. 
\uc774 \ub54c \ud50c\ub78f\uc758 \ud615\ud0dc\ub294 html \ud3ec\ub9f7\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,p.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"plot_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return 
file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Visualization\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-5.png",src:t(3013).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"run-output"},"Run 
output"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-2.png",src:t(3979).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Run output\uc740 kubeflow\uc5d0\uc11c \uc9c0\uc815\ud55c \ud615\ud0dc\ub85c \uc0dd\uae34 Artifacts\ub97c \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uacf3\uc774\uba70 \ud3c9\uac00 \uc9c0\ud45c(Metric)\ub97c \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c(Metric)\uc744 \ubcf4\uc5ec\uc8fc\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument\uc5d0 \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uc774\ub984\uacfc \uac12\uc744 json \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-pipeline"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc5d0\uc11c \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc5d0 \ucd94\uac00 \ud6c4 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n\uc804\uccb4 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Run Output\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-4.png",src:t(6250).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"config"},"Config"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-3.png",src:t(3570).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Config\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc785\ub825\ubc1b\uc740 \ubaa8\ub4e0 \uac12\uc744 \ud655\uc778\ud560 \uc218 
\uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},57:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},7834:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},3979:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},3570:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},6250:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},3013:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8737],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>c});var a=t(7294);function p(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(p[t]=e[t]);return p}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(p[t]=e[t])}return p}var o=a.createContext({}),u=function(e){var n=a.useContext(o),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},m=function(e){var n=u(e.components);return a.createElement(o.Provider,{value:n},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,p=e.mdxType,r=e.originalType,o=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=u(t),_=p,c=s["".concat(o,".").concat(_)]||s[_]||d[_]||r;return t?a.createElement(c,i(i({ref:n},m),{},{components:t})):a.createElement(c,i({ref:n},m))}));function c(e,n){var t=arguments,p=n&&n.mdxType;if("string"==typeof e||p){var r=t.length,i=new Array(r);i[0]=_;var l={};for(var o in n)hasOwnProperty.call(n,o)&&(l[o]=n[o]);l.originalType=e,l[s]="string"==typeof e?e:p,i[1]=l;for(var u=2;u{t.r(n),t.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>u});var a=t(7462),p=(t(7294),t(3905));const r={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow/advanced-run",id:"kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/docs/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/docs/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/docs/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/docs/kubeflow/advanced-mlflow"}},o={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],m={toc:u},s="wrapper";function d(e){let{components:n,...r}=e;return(0,p.kt)(s,(0,a.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"run-result"},"Run Result"),(0,p.kt)("p",null,"Run \uc2e4\ud589 \uacb0\uacfc\ub97c \ub20c\ub7ec\ubcf4\uba74 3\uac1c\uc758 \ud0ed\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4.\n\uac01\uac01 Graph, Run output, Config \uc785\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-0.png",src:t(57).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"graph"},"Graph"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-1.png",src:t(7834).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub798\ud504\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub204\ub974\uba74 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,p.kt)("p",null,"Input/Output \ud0ed\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud55c Config\ub4e4\uacfc Input, Output Artifacts\ub97c \ud655\uc778\ud558\uace0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"logs"},"Logs"),(0,p.kt)("p",null,"Logs\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc \uc2e4\ud589 \uc911 \ub098\uc624\ub294 \ubaa8\ub4e0 stdout\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\ub2e4\ub9cc pod\uc740 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub09c \ud6c4 \uc9c0\uc6cc\uc9c0\uae30 \ub54c\ubb38\uc5d0 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub098\uba74 \uc774 \ud0ed\uc5d0\uc11c\ub294 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.\n\uc774\ub54c\ub294 Output artifacts\uc758 main-logs\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"visualizations"},"Visualizations"),(0,p.kt)("p",null,"Visualizations\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0dd\uc131\ub41c \ud50c\ub78f\uc744 \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud50c\ub78f\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")')," argument\ub85c \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uac12\uc744 \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4. 
\uc774 \ub54c \ud50c\ub78f\uc758 \ud615\ud0dc\ub294 html \ud3ec\ub9f7\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,p.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"plot_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return 
file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Visualization\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-5.png",src:t(3013).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"run-output"},"Run 
output"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-2.png",src:t(3979).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Run output\uc740 kubeflow\uc5d0\uc11c \uc9c0\uc815\ud55c \ud615\ud0dc\ub85c \uc0dd\uae34 Artifacts\ub97c \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uacf3\uc774\uba70 \ud3c9\uac00 \uc9c0\ud45c(Metric)\ub97c \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c(Metric)\uc744 \ubcf4\uc5ec\uc8fc\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument\uc5d0 \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uc774\ub984\uacfc \uac12\uc744 json \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-pipeline"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc5d0\uc11c \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc5d0 \ucd94\uac00 \ud6c4 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n\uc804\uccb4 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Run Output\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-4.png",src:t(6250).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"config"},"Config"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-3.png",src:t(3570).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Config\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc785\ub825\ubc1b\uc740 \ubaa8\ub4e0 \uac12\uc744 \ud655\uc778\ud560 \uc218 
\uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},57:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},7834:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},3979:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},3570:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},6250:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},3013:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file diff --git a/assets/js/48eb1972.0629ff22.js b/assets/js/48eb1972.e4c80eb1.js similarity index 99% rename from assets/js/48eb1972.0629ff22.js rename to assets/js/48eb1972.e4c80eb1.js index a40a4005..9a68da05 100644 --- a/assets/js/48eb1972.0629ff22.js +++ b/assets/js/48eb1972.e4c80eb1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7651],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),d=a,m=u["".concat(p,".").concat(d)]||u[d]||k[d]||i;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,o=new Array(i);o[0]=d;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const i={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,l={unversionedId:"prerequisites/docker/introduction",id:"prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/docs/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/docs/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/introduction.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 
11\uc77c",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/docs/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/docs/prerequisites/docker/"}},p={},s=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:2},{value:"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984",id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984",level:3},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:4},{value:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30",id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30",level:3},{value:"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"References",id:"references",level:3}],c={toc:s},u="wrapper";function k(e){let{components:t,...i}=e;return(0,a.kt)(u,(0,r.Z)({},c,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,a.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc11c\ube44\uc2a4\ud654\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378 \uac1c\ubc1c \uc678\uc5d0\ub3c4 \ub9ce\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ubd80\uac00\uc801\uc778")," \uae30\ub2a5\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ud559\uc2b5 \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ubaa8\ub378 \ud559\uc2b5 \uba85\ub839\uc758 \uc2a4\ucf00\uc904 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uc758 Reproducibility \ubcf4\uc7a5"))),(0,a.kt)("li",{parentName:"ol"},"\ubc30\ud3ec \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ud2b8\ub798\ud53d \ubd84\uc0b0"),(0,a.kt)("li",{parentName:"ul"},"\uc11c\ube44\uc2a4 \uc7a5\uc560 \ubaa8\ub2c8\ud130\ub9c1"),(0,a.kt)("li",{parentName:"ul"},"\uc7a5\uc560 \uc2dc \ud2b8\ub7ec\ube14\uc288\ud305")))),(0,a.kt)("p",null,"\ub2e4\ud589\ud788\ub3c4 \uc774\ub7f0 \uae30\ub2a5\ub4e4\uc5d0 \ub300\ud55c needs\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \ucabd\uc5d0\uc11c \uc774\ubbf8 \ub9ce\uc740 \uace0\ubbfc\uc744 \uac70\uccd0 \ubc1c\uc804\ub418\uc5b4 \uc654\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud560 \ub54c\ub3c4 \uc774\ub7f0 \uace0\ubbfc\uc758 \uacb0\uacfc\ubb3c\ub4e4\uc744 \ud65c\uc6a9\ud558\uba74 \ud070 \ub3c4\uc6c0\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nMLOps\uc5d0\uc11c \ub300\ud45c\uc801\uc73c\ub85c \ud65c\uc6a9\ud558\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uc774 \ubc14\ub85c \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ub3c4\ucee4\uc640 
\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("h3",{id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984"},"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984"),(0,a.kt)("p",null,"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uac01\uac01 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\ub2a5\uacfc \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158(Container Orchestration) \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ub300\ud45c \uc18c\ud504\ud2b8\uc6e8\uc5b4(\uc81c\ud488)\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,a.kt)("p",null,"\ub3c4\ucee4\ub294 \uacfc\uac70\uc5d0 \ub300\uc138\uc600\uc9c0\ub9cc \uc720\ub8cc\ud654 \uad00\ub828 \uc815\ucc45\ub4e4\uc744 \ud558\ub098\uc529 \ucd94\uac00\ud558\uba74\uc11c \uc810\uc810 \uc0ac\uc6a9 \ube48\ub3c4\uac00 \ud558\ub77d\uc138\uc785\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc 2022\ub144 3\uc6d4 \uae30\uc900\uc73c\ub85c \uc544\uc9c1\uae4c\uc9c0\ub3c4 \uac00\uc7a5 \uc77c\ubc18\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2019.png",src:n(2235).Z,width:"1600",height:"900"})),(0,a.kt)("center",null," [from sysdig 2019] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2021.png",src:n(1256).Z,width:"750",height:"437"})),(0,a.kt)("center",null," [from sysdig 2021] "),(0,a.kt)("h4",{id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uc9c0\uae08\uae4c\uc9c0\ub294 \ube44\uad50 \ub300\uc0c1\uc870\ucc28 \uac70\uc758 \uc5c6\ub294 \uc81c\ud488\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"cncf-survey.png",src:n(6640).Z,width:"2048",height:"1317"})),(0,a.kt)("center",null," [from cncf survey] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"t4-ai.png",src:n(2044).Z,width:"926",height:"629"})),(0,a.kt)("center",null," [from t4.ai] "),(0,a.kt)("h3",{id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30"},(0,a.kt)("strong",{parentName:"h3"},"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30")),(0,a.kt)("h4",{id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucd08\uae30 \ub3c4\ucee4 \uac1c\ubc1c\uc2dc\uc5d0\ub294 Docker Engine\uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc758 \ud328\ud0a4\uc9c0"),"\uc5d0 API, CLI, \ub124\ud2b8\uc6cc\ud06c, \uc2a4\ud1a0\ub9ac\uc9c0 \ub4f1 \uc5ec\ub7ec \uae30\ub2a5\ub4e4\uc744 \ubaa8\ub450 \ud3ec\ud568\ud588\uc73c\ub098, ",(0,a.kt)("strong",{parentName:"p"},"MSA")," \uc758 \ucca0\ud559\uc744 \ub2f4\uc544 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc529 \ubd84\ub9ac"),"\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ucd08\uae30\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654\ub97c \uc704\ud574 Docker Engine\uc744 \ub0b4\uc7a5\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub3c4\ucee4 \ubc84\uc804\uc774 \uc5c5\ub370\uc774\ud2b8\ub420 \ub54c\ub9c8\ub2e4 Docker Engine \uc758 \uc778\ud130\ud398\uc774\uc2a4\uac00 \ubcc0\uacbd\ub418\uc5b4 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \ud06c\uac8c \uc601\ud5a5\uc744 \ubc1b\ub294 \uc77c\uc774 
\uacc4\uc18d\ud574\uc11c \ubc1c\uc0dd\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,a.kt)("p",null,"\uadf8\ub798\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc774\ub7f0 \ubd88\ud3b8\ud568\uc744 \ud574\uc18c"),"\ud558\uace0\uc790, \ub3c4\ucee4\ub97c \uc911\uc2ec\uc73c\ub85c \uad6c\uae00 \ub4f1 \ucee8\ud14c\uc774\ub108 \uae30\uc220\uc5d0 \uad00\uc2ec\uc788\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec \uc9d1\ub2e8"),"\ub4e4\uc774 \ud55c\ub370 \ubaa8\uc5ec ",(0,a.kt)("strong",{parentName:"p"},"Open Container Initiative,")," \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"OCI"),"\ub77c\ub294 \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud558\uc5ec \ucee8\ud14c\uc774\ub108\uc5d0 \uad00\ud55c ",(0,a.kt)("strong",{parentName:"p"},"\ud45c\uc900"),"\uc744 \uc815\ud558\ub294 \uc77c\ub4e4\uc744 \uc2dc\uc791\ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\uc5d0\uc11c\ub3c4 \uc778\ud130\ud398\uc774\uc2a4\ub97c ",(0,a.kt)("strong",{parentName:"p"},"\ud55c \ubc88 \ub354 \ubd84\ub9ac"),"\ud574\uc11c, OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\ub294 ",(0,a.kt)("strong",{parentName:"p"},"containerd"),"\ub77c\ub294 Container Runtime \ub97c \uac1c\ubc1c\ud558\uace0, ",(0,a.kt)("strong",{parentName:"p"},"dockerd")," \uac00 containerd \uc758 API \ub97c \ud638\ucd9c\ud558\ub3c4\ub85d \ucd94\uc0c1\ud654 \ub808\uc774\uc5b4\ub97c \ucd94\uac00\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7ec\ud55c \ud750\ub984\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c\ub3c4 \uc774\uc81c\ubd80\ud130\ub294 \ub3c4\ucee4\ub9cc\uc744 \uc9c0\uc6d0\ud558\uc9c0 \uc54a\uace0, ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900\uc744")," \uc900\uc218\ud558\uace0, \uc815\ud574\uc9c4 \uc2a4\ud399\uc744 \uc9c0\ud0a4\ub294 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc740 \ubb34\uc5c7\uc774\ub4e0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d, Container Runtime Interface, \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"CRI \uc2a4\ud399"),"\uc744 \ubc84\uc804 1.5\ubd80\ud130 \uc81c\uacf5\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"cri-o"},"CRI-O"),(0,a.kt)("p",null,"Red Hat, Intel, SUSE, IBM\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900+CRI \uc2a4\ud399\uc744")," \ub530\ub77c Kubernetes \uc804\uc6a9 Container Runtime \uc744 \ubaa9\uc801\uc73c\ub85c \uac1c\ubc1c\ud55c \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 Docker Engine \uc744 \ub514\ud3f4\ud2b8 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc0ac\uc6a9\ud574\uc654\uc9c0\ub9cc, \ub3c4\ucee4\uc758 API \uac00 ",(0,a.kt)("strong",{parentName:"p"},"CRI")," \uc2a4\ud399\uc5d0 \ub9de\uc9c0 \uc54a\uc544(",(0,a.kt)("em",{parentName:"p"},"OCI \ub294 \ub530\ub984"),") \ub3c4\ucee4\uc758 API\ub97c ",(0,a.kt)("strong",{parentName:"p"},"CRI"),"\uc640 \ud638\ud658\ub418\uac8c \ubc14\uafd4\uc8fc\ub294 ",(0,a.kt)("strong",{parentName:"p"},"dockershim"),"\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc790\uccb4\uc801\uc73c\ub85c \uac1c\ubc1c \ubc0f \uc9c0\uc6d0\ud574\uc654\uc5c8\ub294\ub370,(",(0,a.kt)("em",{parentName:"p"},"\ub3c4\ucee4 \uce21\uc774 \uc544\ub2c8\ub77c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uce21\uc5d0\uc11c 
\uc9c0\uc6d0\ud588\ub2e4\ub294 \uc810\uc774 \uad49\uc7a5\ud788 \ud070 \uc9d0\uc774\uc5c8\uc2b5\ub2c8\ub2e4."),") \uc774\uac78 \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,a.kt)("strong",{parentName:"p"},"v1.20 \ubd80\ud130\ub294 Deprecated\ud558\uace0,")," ",(0,a.kt)("strong",{parentName:"p"},"v1.23 \ubd80\ud130\ub294 \uc9c0\uc6d0\uc744 \ud3ec\uae30"),"\ud558\uae30\ub85c \uacb0\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"v1.23 \uc740 2021 \ub144 12\uc6d4 \ub9b4\ub9ac\uc988")),(0,a.kt)("p",null,"\uadf8\ub798\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 v1.23 \ubd80\ud130\ub294 \ub3c4\ucee4\ub97c native \ud558\uac8c \uc4f8 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc ",(0,a.kt)("strong",{parentName:"p"},"\uc0ac\uc6a9\uc790\ub4e4\uc740 \uc774\ub7f0 \ubcc0\ud654\uc5d0 \ud06c\uac8c \uad00\ub828\uc774 \uc788\uc9c4 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc65c\ub0d0\ud558\uba74 Docker Engine\uc744 \ud1b5\ud574 \ub9cc\ub4e4\uc5b4\uc9c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\uae30 \ub54c\ubb38\uc5d0, \ucfe0\ubc84\ub124\ud2f0\uc2a4\uac00 \uc5b4\ub5a4 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc774\ub8e8\uc5b4\uc838\uc788\ub4e0 \uc0ac\uc6a9 \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,a.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}k.isMDXComponent=!0},6640:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},2235:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},1256:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},2044:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7651],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),d=a,m=u["".concat(p,".").concat(d)]||u[d]||k[d]||i;return n?r.createElement(m,o(o({ref:t},c),{},{components:n})):r.createElement(m,o({ref:t},c))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,o=new Array(i);o[0]=d;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:a,o[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>k,frontMatter:()=>i,metadata:()=>l,toc:()=>s});var r=n(7462),a=(n(7294),n(3905));const i={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,l={unversionedId:"prerequisites/docker/introduction",id:"prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/docs/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/docs/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/introduction.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/docs/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/docs/prerequisites/docker/"}},p={},s=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:2},{value:"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984",id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984",level:3},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:4},{value:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30",id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30",level:3},{value:"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"\uc9c0\uae08\uc758 \ub3c4\ucee4 & 
\ucfe0\ubc84\ub124\ud2f0\uc2a4",id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4",level:4},{value:"References",id:"references",level:3}],c={toc:s},u="wrapper";function k(e){let{components:t,...i}=e;return(0,a.kt)(u,(0,r.Z)({},c,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,a.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uc11c\ube44\uc2a4\ud654\ud558\uae30 \uc704\ud574\uc11c\ub294 \ubaa8\ub378 \uac1c\ubc1c \uc678\uc5d0\ub3c4 \ub9ce\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ubd80\uac00\uc801\uc778")," \uae30\ub2a5\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ud559\uc2b5 \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ubaa8\ub378 \ud559\uc2b5 \uba85\ub839\uc758 \uc2a4\ucf00\uc904 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uc758 Reproducibility \ubcf4\uc7a5"))),(0,a.kt)("li",{parentName:"ol"},"\ubc30\ud3ec \ub2e8\uacc4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\ud2b8\ub798\ud53d \ubd84\uc0b0"),(0,a.kt)("li",{parentName:"ul"},"\uc11c\ube44\uc2a4 \uc7a5\uc560 \ubaa8\ub2c8\ud130\ub9c1"),(0,a.kt)("li",{parentName:"ul"},"\uc7a5\uc560 \uc2dc \ud2b8\ub7ec\ube14\uc288\ud305")))),(0,a.kt)("p",null,"\ub2e4\ud589\ud788\ub3c4 \uc774\ub7f0 \uae30\ub2a5\ub4e4\uc5d0 \ub300\ud55c needs\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uac1c\ubc1c \ucabd\uc5d0\uc11c \uc774\ubbf8 \ub9ce\uc740 \uace0\ubbfc\uc744 \uac70\uccd0 \ubc1c\uc804\ub418\uc5b4 \uc654\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud560 \ub54c\ub3c4 \uc774\ub7f0 \uace0\ubbfc\uc758 \uacb0\uacfc\ubb3c\ub4e4\uc744 \ud65c\uc6a9\ud558\uba74 \ud070 \ub3c4\uc6c0\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nMLOps\uc5d0\uc11c \ub300\ud45c\uc801\uc73c\ub85c \ud65c\uc6a9\ud558\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc81c\ud488\uc774 \ubc14\ub85c \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\ub3c4\ucee4\uc640-\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("h3",{id:"\uae30\uc220-\uc774\ub984\uc774-\uc544\ub2c8\ub77c-\uc81c\ud488-\uc774\ub984"},"\uae30\uc220 \uc774\ub984\uc774 \uc544\ub2c8\ub77c \uc81c\ud488 \uc774\ub984"),(0,a.kt)("p",null,"\ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uac01\uac01 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\ub2a5\uacfc \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158(Container Orchestration) \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ub300\ud45c \uc18c\ud504\ud2b8\uc6e8\uc5b4(\uc81c\ud488)\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,a.kt)("p",null,"\ub3c4\ucee4\ub294 \uacfc\uac70\uc5d0 \ub300\uc138\uc600\uc9c0\ub9cc \uc720\ub8cc\ud654 \uad00\ub828 \uc815\ucc45\ub4e4\uc744 \ud558\ub098\uc529 \ucd94\uac00\ud558\uba74\uc11c \uc810\uc810 \uc0ac\uc6a9 \ube48\ub3c4\uac00 \ud558\ub77d\uc138\uc785\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc 2022\ub144 3\uc6d4 \uae30\uc900\uc73c\ub85c \uc544\uc9c1\uae4c\uc9c0\ub3c4 \uac00\uc7a5 \uc77c\ubc18\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2019.png",src:n(2235).Z,width:"1600",height:"900"})),(0,a.kt)("center",null," [from sysdig 2019] 
"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"sysdig-2021.png",src:n(1256).Z,width:"750",height:"437"})),(0,a.kt)("center",null," [from sysdig 2021] "),(0,a.kt)("h4",{id:"\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uc9c0\uae08\uae4c\uc9c0\ub294 \ube44\uad50 \ub300\uc0c1\uc870\ucc28 \uac70\uc758 \uc5c6\ub294 \uc81c\ud488\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"cncf-survey.png",src:n(6640).Z,width:"2048",height:"1317"})),(0,a.kt)("center",null," [from cncf survey] "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"t4-ai.png",src:n(2044).Z,width:"926",height:"629"})),(0,a.kt)("center",null," [from t4.ai] "),(0,a.kt)("h3",{id:"\uc7ac\ubbf8\uc788\ub294-\uc624\ud508\uc18c\uc2a4-\uc5ed\uc0ac-\uc774\uc57c\uae30"},(0,a.kt)("strong",{parentName:"h3"},"\uc7ac\ubbf8\uc788\ub294 \uc624\ud508\uc18c\uc2a4 \uc5ed\uc0ac \uc774\uc57c\uae30")),(0,a.kt)("h4",{id:"\ucd08\uae30-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\ucd08\uae30 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucd08\uae30 \ub3c4\ucee4 \uac1c\ubc1c\uc2dc\uc5d0\ub294 Docker Engine\uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc758 \ud328\ud0a4\uc9c0"),"\uc5d0 API, CLI, \ub124\ud2b8\uc6cc\ud06c, \uc2a4\ud1a0\ub9ac\uc9c0 \ub4f1 \uc5ec\ub7ec \uae30\ub2a5\ub4e4\uc744 \ubaa8\ub450 \ud3ec\ud568\ud588\uc73c\ub098, ",(0,a.kt)("strong",{parentName:"p"},"MSA")," \uc758 \ucca0\ud559\uc744 \ub2f4\uc544 ",(0,a.kt)("strong",{parentName:"p"},"\ud558\ub098\uc529 \ubd84\ub9ac"),"\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ucd08\uae30\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654\ub97c \uc704\ud574 Docker Engine\uc744 \ub0b4\uc7a5\ud558\uace0 \uc788\uc5c8\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub3c4\ucee4 \ubc84\uc804\uc774 \uc5c5\ub370\uc774\ud2b8\ub420 \ub54c\ub9c8\ub2e4 Docker Engine \uc758 \uc778\ud130\ud398\uc774\uc2a4\uac00 \ubcc0\uacbd\ub418\uc5b4 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \ud06c\uac8c \uc601\ud5a5\uc744 \ubc1b\ub294 \uc77c\uc774 \uacc4\uc18d\ud574\uc11c \ubc1c\uc0dd\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,a.kt)("p",null,"\uadf8\ub798\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc774\ub7f0 \ubd88\ud3b8\ud568\uc744 \ud574\uc18c"),"\ud558\uace0\uc790, \ub3c4\ucee4\ub97c \uc911\uc2ec\uc73c\ub85c \uad6c\uae00 \ub4f1 \ucee8\ud14c\uc774\ub108 \uae30\uc220\uc5d0 \uad00\uc2ec\uc788\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec \uc9d1\ub2e8"),"\ub4e4\uc774 \ud55c\ub370 \ubaa8\uc5ec ",(0,a.kt)("strong",{parentName:"p"},"Open Container Initiative,")," \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"OCI"),"\ub77c\ub294 \ud504\ub85c\uc81d\ud2b8\ub97c \uc2dc\uc791\ud558\uc5ec \ucee8\ud14c\uc774\ub108\uc5d0 \uad00\ud55c ",(0,a.kt)("strong",{parentName:"p"},"\ud45c\uc900"),"\uc744 \uc815\ud558\ub294 \uc77c\ub4e4\uc744 \uc2dc\uc791\ud558\uc600\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\uc5d0\uc11c\ub3c4 \uc778\ud130\ud398\uc774\uc2a4\ub97c ",(0,a.kt)("strong",{parentName:"p"},"\ud55c \ubc88 \ub354 \ubd84\ub9ac"),"\ud574\uc11c, OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\ub294 ",(0,a.kt)("strong",{parentName:"p"},"containerd"),"\ub77c\ub294 Container Runtime \ub97c \uac1c\ubc1c\ud558\uace0, ",(0,a.kt)("strong",{parentName:"p"},"dockerd")," \uac00 containerd \uc758 API 
\ub97c \ud638\ucd9c\ud558\ub3c4\ub85d \ucd94\uc0c1\ud654 \ub808\uc774\uc5b4\ub97c \ucd94\uac00\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7ec\ud55c \ud750\ub984\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c\ub3c4 \uc774\uc81c\ubd80\ud130\ub294 \ub3c4\ucee4\ub9cc\uc744 \uc9c0\uc6d0\ud558\uc9c0 \uc54a\uace0, ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900\uc744")," \uc900\uc218\ud558\uace0, \uc815\ud574\uc9c4 \uc2a4\ud399\uc744 \uc9c0\ud0a4\ub294 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc740 \ubb34\uc5c7\uc774\ub4e0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d, Container Runtime Interface, \uc774\ud558 ",(0,a.kt)("strong",{parentName:"p"},"CRI \uc2a4\ud399"),"\uc744 \ubc84\uc804 1.5\ubd80\ud130 \uc81c\uacf5\ud558\uae30 \uc2dc\uc791\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"cri-o"},"CRI-O"),(0,a.kt)("p",null,"Red Hat, Intel, SUSE, IBM\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OCI \ud45c\uc900+CRI \uc2a4\ud399\uc744")," \ub530\ub77c Kubernetes \uc804\uc6a9 Container Runtime \uc744 \ubaa9\uc801\uc73c\ub85c \uac1c\ubc1c\ud55c \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc785\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"\uc9c0\uae08\uc758-\ub3c4\ucee4--\ucfe0\ubc84\ub124\ud2f0\uc2a4"},"\uc9c0\uae08\uc758 \ub3c4\ucee4 & \ucfe0\ubc84\ub124\ud2f0\uc2a4"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 Docker Engine \uc744 \ub514\ud3f4\ud2b8 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc0ac\uc6a9\ud574\uc654\uc9c0\ub9cc, \ub3c4\ucee4\uc758 API \uac00 ",(0,a.kt)("strong",{parentName:"p"},"CRI")," \uc2a4\ud399\uc5d0 \ub9de\uc9c0 \uc54a\uc544(",(0,a.kt)("em",{parentName:"p"},"OCI \ub294 \ub530\ub984"),") \ub3c4\ucee4\uc758 API\ub97c ",(0,a.kt)("strong",{parentName:"p"},"CRI"),"\uc640 \ud638\ud658\ub418\uac8c \ubc14\uafd4\uc8fc\ub294 ",(0,a.kt)("strong",{parentName:"p"},"dockershim"),"\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc790\uccb4\uc801\uc73c\ub85c \uac1c\ubc1c \ubc0f \uc9c0\uc6d0\ud574\uc654\uc5c8\ub294\ub370,(",(0,a.kt)("em",{parentName:"p"},"\ub3c4\ucee4 \uce21\uc774 \uc544\ub2c8\ub77c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uce21\uc5d0\uc11c \uc9c0\uc6d0\ud588\ub2e4\ub294 \uc810\uc774 \uad49\uc7a5\ud788 \ud070 \uc9d0\uc774\uc5c8\uc2b5\ub2c8\ub2e4."),") \uc774\uac78 \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,a.kt)("strong",{parentName:"p"},"v1.20 \ubd80\ud130\ub294 Deprecated\ud558\uace0,")," ",(0,a.kt)("strong",{parentName:"p"},"v1.23 \ubd80\ud130\ub294 \uc9c0\uc6d0\uc744 \ud3ec\uae30"),"\ud558\uae30\ub85c \uacb0\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"v1.23 \uc740 2021 \ub144 12\uc6d4 \ub9b4\ub9ac\uc988")),(0,a.kt)("p",null,"\uadf8\ub798\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 v1.23 \ubd80\ud130\ub294 \ub3c4\ucee4\ub97c native \ud558\uac8c \uc4f8 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc ",(0,a.kt)("strong",{parentName:"p"},"\uc0ac\uc6a9\uc790\ub4e4\uc740 \uc774\ub7f0 \ubcc0\ud654\uc5d0 \ud06c\uac8c \uad00\ub828\uc774 \uc788\uc9c4 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc65c\ub0d0\ud558\uba74 Docker Engine\uc744 \ud1b5\ud574 \ub9cc\ub4e4\uc5b4\uc9c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 OCI \ud45c\uc900\uc744 \uc900\uc218\ud558\uae30 \ub54c\ubb38\uc5d0, \ucfe0\ubc84\ub124\ud2f0\uc2a4\uac00 \uc5b4\ub5a4 \ucee8\ud14c\uc774\ub108 \ub7f0\ud0c0\uc784\uc73c\ub85c \uc774\ub8e8\uc5b4\uc838\uc788\ub4e0 \uc0ac\uc6a9 \uac00\ub2a5\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,a.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}k.isMDXComponent=!0},6640:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},2235:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},1256:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},2044:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file diff --git a/assets/js/4a09dd96.a626411c.js b/assets/js/4a09dd96.49f2f64e.js similarity index 97% rename from assets/js/4a09dd96.a626411c.js rename to assets/js/4a09dd96.49f2f64e.js index d4581132..9bc77aea 100644 --- a/assets/js/4a09dd96.a626411c.js +++ b/assets/js/4a09dd96.49f2f64e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[53],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function a(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):a(a({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(s,".").concat(f)]||c[f]||d[f]||i;return r?n.createElement(b,a(a({ref:t},p),{},{components:r})):n.createElement(b,a({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var 
i=r.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const i={title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,l={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline \uad00\ub828",description:"",source:"@site/docs/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/docs/kubeflow-dashboard-guide/experiments"},next:{title:"1. Kubeflow Introduction",permalink:"/docs/kubeflow/kubeflow-intro"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,n.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions \ud398\uc774\uc9c0\ub4e4\uc5d0\uc11c\ub294 Kubeflow Pipeline\uacfc Pipeline\uc758 \uc2e4\ud589 \uadf8\ub9ac\uace0 Pipeline Run\uc758 \uacb0\uacfc\ub97c \uad00\ub9ac\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipeline\uc774 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c Kubeflow\ub97c \uc0ac\uc6a9\ud558\ub294 \uc8fc\ub41c \uc774\uc720\uc774\uba70, Kubeflow Pipeline\uc744 \ub9cc\ub4dc\ub294 \ubc29\ubc95, \uc2e4\ud589\ud558\ub294 \ubc29\ubc95, \uacb0\uacfc\ub97c \ud655\uc778\ud558\ub294 \ubc29\ubc95 \ub4f1 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"\uc5d0\uc11c \ub2e4\ub8f9\ub2c8\ub2e4."))}d.isMDXComponent=!0},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[53],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function a(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):a(a({},t),e)),r},p=function(e){var t=u(e.components);return 
n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(s,".").concat(f)]||c[f]||d[f]||i;return r?n.createElement(b,a(a({ref:t},p),{},{components:r})):n.createElement(b,a({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const i={title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,l={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline \uad00\ub828",description:"",source:"@site/docs/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/docs/kubeflow-dashboard-guide/experiments"},next:{title:"1. 
Kubeflow Introduction",permalink:"/docs/kubeflow/kubeflow-intro"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,n.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions \ud398\uc774\uc9c0\ub4e4\uc5d0\uc11c\ub294 Kubeflow Pipeline\uacfc Pipeline\uc758 \uc2e4\ud589 \uadf8\ub9ac\uace0 Pipeline Run\uc758 \uacb0\uacfc\ub97c \uad00\ub9ac\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipeline\uc774 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c Kubeflow\ub97c \uc0ac\uc6a9\ud558\ub294 \uc8fc\ub41c \uc774\uc720\uc774\uba70, Kubeflow Pipeline\uc744 \ub9cc\ub4dc\ub294 \ubc29\ubc95, \uc2e4\ud589\ud558\ub294 \ubc29\ubc95, \uacb0\uacfc\ub97c \ud655\uc778\ud558\ub294 \ubc29\ubc95 \ub4f1 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"\uc5d0\uc11c \ub2e4\ub8f9\ub2c8\ub2e4."))}d.isMDXComponent=!0},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/4a2e0471.ea979e3b.js b/assets/js/4a2e0471.818f4e78.js similarity index 97% rename from assets/js/4a2e0471.ea979e3b.js rename to assets/js/4a2e0471.818f4e78.js index 8ec3570a..c9cf562a 100644 --- a/assets/js/4a2e0471.ea979e3b.js +++ b/assets/js/4a2e0471.818f4e78.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3774],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,b=c["".concat(s,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"version-1.0/kubeflow-dashboard-guide/intro",title:"1. 
Central Dashboard",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/docs/1.0/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-kf"},"Kubeflow \uc124\uce58"),"\ub97c \uc644\ub8cc\ud558\uba74, \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"after-login",src:r(2920).Z,width:"4008",height:"1266"})),(0,o.kt)("p",null,"Central Dashboard\ub294 Kubeflow\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \ud1b5\ud569\ud558\uc5ec \uc81c\uacf5\ud558\ub294 UI\uc785\ub2c8\ub2e4. Central Dashboard\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \uae30\ub2a5\uc740 \ud06c\uac8c \uc67c\ucabd\uc758 \ud0ed\uc744 \uae30\uc900\uc73c\ub85c \uad6c\ubd84\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Home"),(0,o.kt)("li",{parentName:"ul"},"Notebooks"),(0,o.kt)("li",{parentName:"ul"},"Tensorboards"),(0,o.kt)("li",{parentName:"ul"},"Volumes"),(0,o.kt)("li",{parentName:"ul"},"Models"),(0,o.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,o.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,o.kt)("li",{parentName:"ul"},"Pipelines"),(0,o.kt)("li",{parentName:"ul"},"Runs"),(0,o.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,o.kt)("li",{parentName:"ul"},"Artifacts"),(0,o.kt)("li",{parentName:"ul"},"Executions")),(0,o.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uae30\ub2a5\ubcc4 \uac04\ub2e8\ud55c \uc0ac\uc6a9\ubc95\uc744 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},2920:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3774],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function 
i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,b=c["".concat(s,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"version-1.0/kubeflow-dashboard-guide/intro",title:"1. Central Dashboard",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/docs/1.0/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-kf"},"Kubeflow \uc124\uce58"),"\ub97c \uc644\ub8cc\ud558\uba74, \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"after-login",src:r(2920).Z,width:"4008",height:"1266"})),(0,o.kt)("p",null,"Central Dashboard\ub294 Kubeflow\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \ud1b5\ud569\ud558\uc5ec \uc81c\uacf5\ud558\ub294 UI\uc785\ub2c8\ub2e4. 
Central Dashboard\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \uae30\ub2a5\uc740 \ud06c\uac8c \uc67c\ucabd\uc758 \ud0ed\uc744 \uae30\uc900\uc73c\ub85c \uad6c\ubd84\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Home"),(0,o.kt)("li",{parentName:"ul"},"Notebooks"),(0,o.kt)("li",{parentName:"ul"},"Tensorboards"),(0,o.kt)("li",{parentName:"ul"},"Volumes"),(0,o.kt)("li",{parentName:"ul"},"Models"),(0,o.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,o.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,o.kt)("li",{parentName:"ul"},"Pipelines"),(0,o.kt)("li",{parentName:"ul"},"Runs"),(0,o.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,o.kt)("li",{parentName:"ul"},"Artifacts"),(0,o.kt)("li",{parentName:"ul"},"Executions")),(0,o.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uae30\ub2a5\ubcc4 \uac04\ub2e8\ud55c \uc0ac\uc6a9\ubc95\uc744 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},2920:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/4e3fc5d7.05108ede.js b/assets/js/4e3fc5d7.a6d29434.js similarity index 99% rename from assets/js/4e3fc5d7.05108ede.js rename to assets/js/4e3fc5d7.a6d29434.js index e43fa9e4..a8019d3e 100644 --- a/assets/js/4e3fc5d7.05108ede.js +++ b/assets/js/4e3fc5d7.a6d29434.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6742],{3905:(e,t,i)=>{i.d(t,{Zo:()=>c,kt:()=>b});var n=i(7294);function l(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function p(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function a(e){for(var t=1;t=0||(l[i]=e[i]);return l}(e,t);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(l[i]=e[i])}return l}var o=n.createContext({}),s=function(e){var t=n.useContext(o),i=t;return e&&(i="function"==typeof e?e(t):a(a({},t),e)),i},c=function(e){var t=s(e.components);return n.createElement(o.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},g=n.forwardRef((function(e,t){var i=e.components,l=e.mdxType,p=e.originalType,o=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),u=s(i),g=l,b=u["".concat(o,".").concat(g)]||u[g]||d[g]||p;return i?n.createElement(b,a(a({ref:t},c),{},{components:i})):n.createElement(b,a({ref:t},c))}));function b(e,t){var i=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var p=i.length,a=new Array(p);a[0]=g;var r={};for(var o in t)hasOwnProperty.call(t,o)&&(r[o]=t[o]);r.originalType=e,r[u]="string"==typeof e?e:l,a[1]=r;for(var s=2;s{i.r(t),i.d(t,{assets:()=>o,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>r,toc:()=>s});var n=i(7462),l=(i(7294),i(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,r={unversionedId:"kubeflow/basic-pipeline-upload",id:"version-1.0/kubeflow/basic-pipeline-upload",title:"6. 
Pipeline - Upload",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/docs/1.0/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/docs/1.0/kubeflow/basic-run"}},o={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Pipelines \ud0ed \uc120\ud0dd",id:"1-pipelines-\ud0ed-\uc120\ud0dd",level:3},{value:"2. Upload Pipeline \uc120\ud0dd",id:"2-upload-pipeline-\uc120\ud0dd",level:3},{value:"3. Choose file \uc120\ud0dd",id:"3-choose-file-\uc120\ud0dd",level:3},{value:"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc",id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc",level:3},{value:"5. Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"\uc774\uc81c \uc6b0\ub9ac\uac00 \ub9cc\ub4e0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc9c1\uc811 kubeflow\uc5d0\uc11c \uc5c5\ub85c\ub4dc \ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ub294 kubeflow \ub300\uc2dc\ubcf4\ub4dc UI\ub97c \ud1b5\ud574 \uc9c4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n",(0,l.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-kf#%EC%A0%95%EC%83%81-%EC%84%A4%EC%B9%98-%ED%99%95%EC%9D%B8"},"Install Kubeflow")," \uc5d0\uc11c \uc0ac\uc6a9\ud55c \ubc29\ubc95\uc744 \uc774\uc6a9\ud574 \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080"),"\uc5d0 \uc811\uc18d\ud574 \ub300\uc2dc\ubcf4\ub4dc\ub97c \uc5f4\uc5b4\uc90d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-pipelines-\ud0ed-\uc120\ud0dd"},"1. Pipelines \ud0ed \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:i(6451).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-upload-pipeline-\uc120\ud0dd"},"2. Upload Pipeline \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:i(1382).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-choose-file-\uc120\ud0dd"},"3. Choose file \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:i(3109).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc"},"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:i(9393).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. 
Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(2017).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc740 \uc5c5\ub85c\ub4dc\ub97c \ud1b5\ud574\uc11c \ubc84\uc804\uc744 \uad00\ub9ac\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ub2e4\ub9cc \uae43\ud5d9\uacfc \uac19\uc740 \ucf54\ub4dc \ucc28\uc6d0\uc758 \ubc84\uc804 \uad00\ub9ac\uac00 \uc544\ub2cc \uac19\uc740 \uc774\ub984\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.\n\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud55c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 example_pipeline\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:i(5736).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(2017).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Upload Version\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uc218 \uc788\ub294 \ud654\uba74\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:i(502).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:i(8625).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778 \ubc84\uc804\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:i(6854).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},6451:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},1382:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},3109:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},9393:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},2017:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},5736:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},502:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},8625:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},6854:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6742],{3905:(e,t,i)=>{i.d(t,{Zo:()=>c,kt:()=>b});var n=i(7294);function l(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function p(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function a(e){for(var t=1;t=0||(l[i]=e[i]);return l}(e,t);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(l[i]=e[i])}return l}var o=n.createContext({}),s=function(e){var t=n.useContext(o),i=t;return e&&(i="function"==typeof e?e(t):a(a({},t),e)),i},c=function(e){var t=s(e.components);return n.createElement(o.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},g=n.forwardRef((function(e,t){var i=e.components,l=e.mdxType,p=e.originalType,o=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),u=s(i),g=l,b=u["".concat(o,".").concat(g)]||u[g]||d[g]||p;return i?n.createElement(b,a(a({ref:t},c),{},{components:i})):n.createElement(b,a({ref:t},c))}));function b(e,t){var i=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var p=i.length,a=new Array(p);a[0]=g;var r={};for(var o in t)hasOwnProperty.call(t,o)&&(r[o]=t[o]);r.originalType=e,r[u]="string"==typeof e?e:l,a[1]=r;for(var s=2;s{i.r(t),i.d(t,{assets:()=>o,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>r,toc:()=>s});var n=i(7462),l=(i(7294),i(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,r={unversionedId:"kubeflow/basic-pipeline-upload",id:"version-1.0/kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/docs/1.0/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/docs/1.0/kubeflow/basic-run"}},o={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Pipelines \ud0ed \uc120\ud0dd",id:"1-pipelines-\ud0ed-\uc120\ud0dd",level:3},{value:"2. Upload Pipeline \uc120\ud0dd",id:"2-upload-pipeline-\uc120\ud0dd",level:3},{value:"3. Choose file \uc120\ud0dd",id:"3-choose-file-\uc120\ud0dd",level:3},{value:"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc",id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc",level:3},{value:"5. 
Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"\uc774\uc81c \uc6b0\ub9ac\uac00 \ub9cc\ub4e0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc9c1\uc811 kubeflow\uc5d0\uc11c \uc5c5\ub85c\ub4dc \ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ub294 kubeflow \ub300\uc2dc\ubcf4\ub4dc UI\ub97c \ud1b5\ud574 \uc9c4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n",(0,l.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-kf#%EC%A0%95%EC%83%81-%EC%84%A4%EC%B9%98-%ED%99%95%EC%9D%B8"},"Install Kubeflow")," \uc5d0\uc11c \uc0ac\uc6a9\ud55c \ubc29\ubc95\uc744 \uc774\uc6a9\ud574 \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080"),"\uc5d0 \uc811\uc18d\ud574 \ub300\uc2dc\ubcf4\ub4dc\ub97c \uc5f4\uc5b4\uc90d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-pipelines-\ud0ed-\uc120\ud0dd"},"1. Pipelines \ud0ed \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:i(6451).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-upload-pipeline-\uc120\ud0dd"},"2. Upload Pipeline \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:i(1382).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-choose-file-\uc120\ud0dd"},"3. Choose file \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:i(3109).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc"},"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:i(9393).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(2017).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc740 \uc5c5\ub85c\ub4dc\ub97c \ud1b5\ud574\uc11c \ubc84\uc804\uc744 \uad00\ub9ac\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub2e4\ub9cc \uae43\ud5d9\uacfc \uac19\uc740 \ucf54\ub4dc \ucc28\uc6d0\uc758 \ubc84\uc804 \uad00\ub9ac\uac00 \uc544\ub2cc \uac19\uc740 \uc774\ub984\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.\n\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud55c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 example_pipeline\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:i(5736).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(2017).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Upload Version\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uc218 \uc788\ub294 \ud654\uba74\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:i(502).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:i(8625).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778 \ubc84\uc804\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:i(6854).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},6451:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},1382:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},3109:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},9393:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},2017:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},5736:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},502:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},8625:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},6854:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file diff --git a/assets/js/4fc796a7.55c8ebbe.js b/assets/js/4fc796a7.8d31fde2.js similarity index 99% rename from assets/js/4fc796a7.55c8ebbe.js rename to assets/js/4fc796a7.8d31fde2.js index 0c201fdf..4dd63723 100644 --- a/assets/js/4fc796a7.55c8ebbe.js +++ b/assets/js/4fc796a7.8d31fde2.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3872],{3905:(n,e,t)=>{t.d(e,{Zo:()=>p,kt:()=>_});var a=t(7294);function l(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function r(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var 
e=1;e=0||(l[t]=n[t]);return l}(n,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(l[t]=n[t])}return l}var s=a.createContext({}),m=function(n){var e=a.useContext(s),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},p=function(n){var e=m(n.components);return a.createElement(s.Provider,{value:e},n.children)},d="mdxType",c={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,l=n.mdxType,r=n.originalType,s=n.parentName,p=o(n,["components","mdxType","originalType","parentName"]),d=m(t),u=l,_=d["".concat(s,".").concat(u)]||d[u]||c[u]||r;return t?a.createElement(_,i(i({ref:e},p),{},{components:t})):a.createElement(_,i({ref:e},p))}));function _(n,e){var t=arguments,l=e&&e.mdxType;if("string"==typeof n||l){var r=t.length,i=new Array(r);i[0]=u;var o={};for(var s in e)hasOwnProperty.call(e,s)&&(o[s]=e[s]);o.originalType=n,o[d]="string"==typeof n?n:l,i[1]=o;for(var m=2;m{t.r(e),t.d(e,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},i=void 0,o={unversionedId:"api-deployment/seldon-children",id:"version-1.0/api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/docs/1.0/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-children.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow"},next:{title:"1. 
Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/1.0/appendix/pyenv"}},s={},m=[{value:"Multi Models",id:"multi-models",level:2},{value:"Pipeline",id:"pipeline",level:2}],p={toc:m},d="wrapper";function c(n){let{components:e,...r}=n;return(0,l.kt)(d,(0,a.Z)({},p,r,{components:e,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"multi-models"},"Multi Models"),(0,l.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud588\ub358 \ubc29\ubc95\ub4e4\uc740 \ubaa8\ub450 \ub2e8\uc77c \ubaa8\ub378\uc744 \ub300\uc0c1\uc73c\ub85c \ud588\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc5ec\ub7ec \uac1c\uc758 \ubaa8\ub378\uc744 \uc5f0\uacb0\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\uc6b0\uc120 \ubaa8\ub378\uc744 2\uac1c\ub97c \uc0dd\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc740 \uc55e\uc11c \uc0ac\uc6a9\ud55c SVC \ubaa8\ub378\uc5d0 StandardScaler\ub97c \ucd94\uac00\ud558\uace0 \uc800\uc7a5\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n 
signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(1906).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"MLflow \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ubaa8\ub378\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(3188).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"\uac01\uac01\uc758 run_id\ub97c \ud655\uc778 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc774 SeldonDeployment \uc2a4\ud399\uc744 \uc815\uc758\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \ub450 \uac1c\uac00 \ub418\uc5c8\uc73c\ubbc0\ub85c \uac01 \ubaa8\ub378\uc758 initContainer\uc640 container\ub97c \uc815\uc758\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774 \ud544\ub4dc\ub294 \uc785\ub825\uac12\uc744 array\ub85c \ubc1b\uc73c\uba70 \uc21c\uc11c\ub294 \uad00\uacc4\uc5c6\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \uc2e4\ud589\ud558\ub294 \uc21c\uc11c\ub294 graph\uc5d0\uc11c \uc815\uc758\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"graph\uc758 \ub3d9\uc791 \ubc29\uc2dd\uc740 \ucc98\uc74c \ubc1b\uc740 \uac12\uc744 \uc815\ud574\uc9c4 predict_method\ub85c \ubcc0\ud658\ud55c \ub4a4 children\uc73c\ub85c \uc815\uc758\ub41c \ubaa8\ub378\uc5d0 \uc804\ub2ec\ud558\ub294 \ubc29\uc2dd\uc785\ub2c8\ub2e4.\n\uc774 \uacbd\uc6b0 scaler 
-> svc \ub85c \ub370\uc774\ud130\uac00 \uc804\ub2ec\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\uc81c \uc704\uc758 \uc2a4\ud399\uc744 yaml\ud30c\uc77c\ub85c \uc0dd\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 API\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub410\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c pod\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},1906:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},3188:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file +"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3872],{3905:(n,e,t)=>{t.d(e,{Zo:()=>p,kt:()=>_});var a=t(7294);function l(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function r(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(l[t]=n[t]);return l}(n,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(l[t]=n[t])}return l}var s=a.createContext({}),m=function(n){var e=a.useContext(s),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},p=function(n){var e=m(n.components);return a.createElement(s.Provider,{value:e},n.children)},d="mdxType",c={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,l=n.mdxType,r=n.originalType,s=n.parentName,p=o(n,["components","mdxType","originalType","parentName"]),d=m(t),u=l,_=d["".concat(s,".").concat(u)]||d[u]||c[u]||r;return t?a.createElement(_,i(i({ref:e},p),{},{components:t})):a.createElement(_,i({ref:e},p))}));function _(n,e){var t=arguments,l=e&&e.mdxType;if("string"==typeof n||l){var r=t.length,i=new Array(r);i[0]=u;var o={};for(var s in e)hasOwnProperty.call(e,s)&&(o[s]=e[s]);o.originalType=n,o[d]="string"==typeof n?n:l,i[1]=o;for(var m=2;m{t.r(e),t.d(e,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},i=void 0,o={unversionedId:"api-deployment/seldon-children",id:"version-1.0/api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/docs/1.0/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-children.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow"},next:{title:"1. 
Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/1.0/appendix/pyenv"}},s={},m=[{value:"Multi Models",id:"multi-models",level:2},{value:"Pipeline",id:"pipeline",level:2}],p={toc:m},d="wrapper";function c(n){let{components:e,...r}=n;return(0,l.kt)(d,(0,a.Z)({},p,r,{components:e,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"multi-models"},"Multi Models"),(0,l.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud588\ub358 \ubc29\ubc95\ub4e4\uc740 \ubaa8\ub450 \ub2e8\uc77c \ubaa8\ub378\uc744 \ub300\uc0c1\uc73c\ub85c \ud588\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc5ec\ub7ec \uac1c\uc758 \ubaa8\ub378\uc744 \uc5f0\uacb0\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\uc6b0\uc120 \ubaa8\ub378\uc744 2\uac1c\ub97c \uc0dd\uc131\ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc740 \uc55e\uc11c \uc0ac\uc6a9\ud55c SVC \ubaa8\ub378\uc5d0 StandardScaler\ub97c \ucd94\uac00\ud558\uace0 \uc800\uc7a5\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n 
signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(1906).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"MLflow \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ubaa8\ub378\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(3188).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"\uac01\uac01\uc758 run_id\ub97c \ud655\uc778 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc774 SeldonDeployment \uc2a4\ud399\uc744 \uc815\uc758\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \ub450 \uac1c\uac00 \ub418\uc5c8\uc73c\ubbc0\ub85c \uac01 \ubaa8\ub378\uc758 initContainer\uc640 container\ub97c \uc815\uc758\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774 \ud544\ub4dc\ub294 \uc785\ub825\uac12\uc744 array\ub85c \ubc1b\uc73c\uba70 \uc21c\uc11c\ub294 \uad00\uacc4\uc5c6\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ubaa8\ub378\uc774 \uc2e4\ud589\ud558\ub294 \uc21c\uc11c\ub294 graph\uc5d0\uc11c \uc815\uc758\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"graph\uc758 \ub3d9\uc791 \ubc29\uc2dd\uc740 \ucc98\uc74c \ubc1b\uc740 \uac12\uc744 \uc815\ud574\uc9c4 predict_method\ub85c \ubcc0\ud658\ud55c \ub4a4 children\uc73c\ub85c \uc815\uc758\ub41c \ubaa8\ub378\uc5d0 \uc804\ub2ec\ud558\ub294 \ubc29\uc2dd\uc785\ub2c8\ub2e4.\n\uc774 \uacbd\uc6b0 scaler 
-> svc \ub85c \ub370\uc774\ud130\uac00 \uc804\ub2ec\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\uc81c \uc704\uc758 \uc2a4\ud399\uc744 yaml\ud30c\uc77c\ub85c \uc0dd\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 API\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub410\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c pod\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},1906:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},3188:(n,e,t)=>{t.d(e,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file diff --git a/assets/js/541347e5.142ae047.js b/assets/js/541347e5.455fe2c3.js similarity index 99% rename 
from assets/js/541347e5.142ae047.js rename to assets/js/541347e5.455fe2c3.js index 856f6787..eb7da235 100644 --- a/assets/js/541347e5.142ae047.js +++ b/assets/js/541347e5.455fe2c3.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2725],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var i=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function o(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=i.createContext({}),u=function(e){var t=i.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},s=function(e){var t=u(e.components);return i.createElement(p.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),d=u(n),m=r,k=d["".concat(p,".").concat(m)]||d[m]||c[m]||l;return n?i.createElement(k,o(o({ref:t},s),{},{components:n})):i.createElement(k,o({ref:t},s))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,o=new Array(l);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[d]="string"==typeof e?e:r,o[1]=a;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>a,toc:()=>u});var i=n(7462),r=(n(7294),n(3905));const l={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},o=void 0,a={unversionedId:"introduction/component",id:"version-1.0/introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/versioned_docs/version-1.0/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/docs/1.0/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/docs/1.0/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/docs/1.0/introduction/why_kubernetes"}},p={},u=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model training",id:"3-model-training",level:3},{value:"4. Model evaluation",id:"4-model-evaluation",level:3},{value:"5. Model serving",id:"5-model-serving",level:3},{value:"6. 
Online experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],s={toc:u},d="wrapper";function c(e){let{components:t,...l}=e;return(0,r.kt)(d,(0,i.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,r.kt)("p",null," 2021\ub144 5\uc6d4\uc5d0 \ubc1c\ud45c\ub41c \uad6c\uae00\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://services.google.com/fh/files/misc/practitioners_guide_to_mlops_whitepaper.pdf"},"white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning"),"\uc5d0\uc11c\ub294 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\ub4e4\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc744 \uc5b8\uae09\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlops-component",src:n(8037).Z,width:"2352",height:"1890"})),(0,r.kt)("p",null," \uac01 \uae30\ub2a5\uc774 \uc5b4\ub5a4 \uc5ed\ud560\uc744 \ud558\ub294\uc9c0 \uc0b4\ud3b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,r.kt)("p",null," \uc2e4\ud5d8(Experimentation)\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub370\uc774\ud130\ub97c \ubd84\uc11d\ud558\uace0, \ud504\ub85c\ud1a0\ud0c0\uc785 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uba70 \ud559\uc2b5 \uae30\ub2a5\uc744 \uad6c\ud604\ud560 \uc218 \uc788\ub3c4\ub85d \ud558\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uae43(Git)\uacfc \uac19\uc740 \ubc84\uc804 \ucee8\ud2b8\ub864 \ub3c4\uad6c\uc640 \ud1b5\ud569\ub41c \ub178\ud2b8\ubd81(Jupyter Notebook) \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\ud55c \ub370\uc774\ud130, \ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130, \ud3c9\uac00 \uc9c0\ud45c\ub97c \ud3ec\ud568\ud55c \uc2e4\ud5d8 \ucd94\uc801 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc640 \ubaa8\ub378\uc5d0 \ub300\ud55c \ubd84\uc11d \ubc0f \uc2dc\uac01\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"2-data-processing"},"2. 
Data Processing"),(0,r.kt)("p",null," \ub370\uc774\ud130 \ucc98\ub9ac(Data Processing)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378 \uac1c\ubc1c \ub2e8\uacc4, \uc9c0\uc18d\uc801\uc778 \ud559\uc2b5(Continuous Training) \ub2e8\uacc4, \uadf8\ub9ac\uace0 API \ubc30\ud3ec(API Deployment) \ub2e8\uacc4\uc5d0\uc11c \ub9ce\uc740 \uc591\uc758 \ub370\uc774\ud130\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ub370\uc774\ud130 \uc18c\uc2a4\uc640 \uc11c\ube44\uc2a4\uc5d0 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \ucee4\ub125\ud130(connector) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \uc778\ucf54\ub354(encoder) & \ub514\ucf54\ub354(decoder) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ub370\uc774\ud130 \ubcc0\ud658\uacfc \ud53c\ucc98 \uc5d4\uc9c0\ub2c8\uc5b4\ub9c1(feature engineering) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ud559\uc2b5\uacfc \uc11c\ube59\uc744 \uc704\ud55c \ud655\uc7a5 \uac00\ub2a5\ud55c \ubc30\uce58, \uc2a4\ud2b8\ub9bc \ub370\uc774\ud130 \ucc98\ub9ac \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"3-model-training"},"3. Model training"),(0,r.kt)("p",null," \ubaa8\ub378 \ud559\uc2b5(Model training)\uc740 \ubaa8\ub378 \ud559\uc2b5\uc744 \uc704\ud55c \uc54c\uace0\ub9ac\uc998\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc2e4\ud589\uc2dc\ucf1c\uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"ML \ud504\ub808\uc784\uc6cc\ud06c\uc758 \uc2e4\ud589\uc744 \uc704\ud55c \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc218\uc758 GPU / \ubd84\uc0b0 \ud559\uc2b5 \uc0ac\uc6a9\uc744 \uc704\ud55c \ubd84\uc0b0 \ud559\uc2b5 \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130 \ud29c\ub2dd\uacfc \ucd5c\uc801\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"4-model-evaluation"},"4. Model evaluation"),(0,r.kt)("p",null," \ubaa8\ub378 \ud3c9\uac00(Model evaluation)\ub294 \uc2e4\ud5d8 \ud658\uacbd\uacfc \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uad00\ucc30\ud560 \uc218 \uc788\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ud3c9\uac00 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ubaa8\ub378 \uc131\ub2a5 \ud3c9\uac00 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc9c0\uc18d \ud559\uc2b5 \uc2e4\ud589 \uacb0\uacfc\uc5d0 \ub300\ud55c \uc608\uce21 \uc131\ub2a5 \ucd94\uc801"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \ubaa8\ub378\uc758 \uc131\ub2a5 \ube44\uad50\uc640 \uc2dc\uac01\ud654"),(0,r.kt)("li",{parentName:"ul"},"\ud574\uc11d\ud560 \uc218 \uc788\ub294 AI \uae30\uc220\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378 \ucd9c\ub825 \ud574\uc11d \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"5-model-serving"},"5. 
Model serving"),(0,r.kt)("p",null," \ubaa8\ub378 \uc11c\ube59(Model serving)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uc11c\ube59\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uc800 \uc9c0\uc5f0 \ucd94\ub860\uacfc \uace0\uac00\uc6a9\uc131 \ucd94\ub860 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c ML \ubaa8\ub378 \uc11c\ube59 \ud504\ub808\uc784\uc6cc\ud06c \uc9c0\uc6d0(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGGoost. etc)"),(0,r.kt)("li",{parentName:"ul"},"\ubcf5\uc7a1\ud55c \ud615\ud0dc\uc758 \ucd94\ub860 \ub8e8\ud2f4 \uae30\ub2a5 \uc81c\uacf5, \uc608\ub97c \ub4e4\uc5b4 \uc804\ucc98\ub9ac(preprocess) \ub610\ub294 \ud6c4\ucc98\ub9ac(postprocess) \uae30\ub2a5\uacfc \ucd5c\uc885 \uacb0\uacfc\ub97c \uc704\ud574 \ub2e4\uc218\uc758 \ubaa8\ub378\uc774 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\ub97c \ub9d0\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\uc21c\uac04\uc801\uc73c\ub85c \uce58\uc19f\ub294 \ucd94\ub860 \uc694\uccad\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud55c \uc624\ud1a0 \uc2a4\ucf00\uc77c\ub9c1(autoscaling) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ucd94\ub860 \uc694\uccad\uacfc \ucd94\ub860 \uacb0\uacfc\uc5d0 \ub300\ud55c \ub85c\uae45 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"6-online-experimentation"},"6. Online experimentation"),(0,r.kt)("p",null," \uc628\ub77c\uc778 \uc2e4\ud5d8(Online experimentation)\uc740 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc774 \uc0dd\uc131\ub418\uc5c8\uc744 \ub54c, \uc774 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uba74 \uc5b4\ub290 \uc815\ub3c4\uc758 \uc131\ub2a5\uc744 \ubcf4\uc77c \uac83\uc778\uc9c0 \uac80\uc99d\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc774 \uae30\ub2a5\uc740 \uc0c8 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\ub294 \uac83\uae4c\uc9c0 \uc5f0\ub3d9\ud558\uae30 \uc704\ud574 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\uc640 \uc5f0\ub3d9\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uce74\ub098\ub9ac(canary) & \uc100\ub3c4(shadow) \ubc30\ud3ec \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"A/B \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\uba40\ud2f0 \uc554\ub4dc \ubc34\ub527(Multi-armed bandit) \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,r.kt)("p",null,"\ubaa8\ub378 \ubaa8\ub2c8\ud130\ub9c1(Model Monitoring)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \ubaa8\ub2c8\ud130\ub9c1\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4 \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \ub5a8\uc5b4\uc838 \uc5c5\ub370\uc774\ud2b8\uac00 \ud544\uc694\ud55c\uc9c0\uc5d0 \ub300\ud55c \uc815\ubcf4 \ub4f1\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"8-ml-pipeline"},"8. 
ML Pipeline"),(0,r.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ud30c\uc774\ud504\ub77c\uc778(ML Pipeline)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ubcf5\uc7a1\ud55c ML \ud559\uc2b5\uacfc \ucd94\ub860 \uc791\uc5c5\uc744 \uad6c\uc131\ud558\uace0 \uc81c\uc5b4\ud558\uace0 \uc790\ub3d9\ud654\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \uc774\ubca4\ud2b8\ub97c \uc18c\uc2a4\ub97c \ud1b5\ud55c \ud30c\uc774\ud504\ub77c\uc778 \uc2e4\ud589 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130\uc640 \uc0dd\uc131\ub418\ub294 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub97c \uc704\ud55c \uba38\uc2e0\ub7ec\ub2dd \uba54\ud0c0\ub370\uc774\ud130 \ucd94\uc801\uacfc \uc5f0\ub3d9 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc778 \uba38\uc2e0\ub7ec\ub2dd \uc791\uc5c5\uc744 \uc704\ud55c \ub0b4\uc7a5 \ucef4\ud3ec\ub10c\ud2b8 \uc9c0\uc6d0\uacfc \uc0ac\uc6a9\uc790\uac00 \uc9c1\uc811 \uad6c\ud604\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \uc9c0\uc6d0 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc2e4\ud589 \ud658\uacbd \uc81c\uacf5 \uae30\ub2a5")),(0,r.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,r.kt)("p",null," \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \uc0dd\uba85 \uc8fc\uae30(Lifecycle)\uc744 \uc911\uc559 \uc800\uc7a5\uc18c\uc5d0\uc11c \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378 \uadf8\ub9ac\uace0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ub300\ud55c \ub4f1\ub85d, \ucd94\uc801, \ubc84\uc800\ub2dd \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ubc30\ud3ec\ub97c \uc704\ud574 \ud544\uc694\ud55c \ub370\uc774\ud130\uc640 \ub7f0\ud0c0\uc784 \ud328\ud0a4\uc9c0\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4 \uc800\uc7a5 \uae30\ub2a5")),(0,r.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc5d0 \ub300\ud55c \uacf5\uc720, \uac80\uc0c9, \uc7ac\uc0ac\uc6a9 \uadf8\ub9ac\uace0 \ubc84\uc804 \uad00\ub9ac \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc774\ubca4\ud2b8 \uc2a4\ud2b8\ub9ac\ubc0d \ubc0f \uc628\ub77c\uc778 \ucd94\ub860 \uc791\uc5c5\uc5d0 \ub300\ud55c \uc2e4\uc2dc\uac04 \ucc98\ub9ac \ubc0f \uc800 \uc9c0\uc5f0 \uc11c\ube59 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc0ac\uc9c4, \ud14d\uc2a4\ud2b8, \ud14c\uc774\ube14 \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \uac19\uc740 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130 \uc9c0\uc6d0 \uae30\ub2a5")),(0,r.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,r.kt)("p",null," MLOps\uc758 \uac01 \ub2e8\uacc4\uc5d0\uc11c\ub294 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \uc0b0\ucd9c\ubb3c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
ML \uba54\ud0c0\ub370\uc774\ud130\ub294 \uc774\ub7f0 \uc0b0\ucd9c\ubb3c\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\nML \uba54\ud0c0\ub370\uc774\ud130\uc640 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub294 \uc0b0\ucd9c\ubb3c\uc758 \uc704\uce58, \ud0c0\uc785, \uc18d\uc131, \uadf8\ub9ac\uace0 \uad00\ub828\ub41c \uc2e4\ud5d8(experiment)\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uad00\ub9ac\ud558\uae30 \uc704\ud574 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \ud788\uc2a4\ud1a0\ub9ac \uad00\ub9ac \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc2e4\ud5d8\uacfc \ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130 \uc124\uc815\uc5d0 \ub300\ud55c \ucd94\uc801, \uacf5\uc720 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \uc800\uc7a5, \uc811\uadfc, \uc2dc\uac01\ud654, \ub2e4\uc6b4\ub85c\ub4dc \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\ub978 MLOps \uae30\ub2a5\uacfc\uc758 \ud1b5\ud569 \uae30\ub2a5 \uc81c\uacf5")))}c.isMDXComponent=!0},8037:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2725],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var i=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function o(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=i.createContext({}),u=function(e){var t=i.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},s=function(e){var t=u(e.components);return i.createElement(p.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),d=u(n),m=r,k=d["".concat(p,".").concat(m)]||d[m]||c[m]||l;return n?i.createElement(k,o(o({ref:t},s),{},{components:n})):i.createElement(k,o({ref:t},s))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,o=new Array(l);o[0]=m;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[d]="string"==typeof e?e:r,o[1]=a;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>c,frontMatter:()=>l,metadata:()=>a,toc:()=>u});var i=n(7462),r=(n(7294),n(3905));const l={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},o=void 0,a={unversionedId:"introduction/component",id:"version-1.0/introduction/component",title:"3. 
Components of MLOps",description:"Describe MLOps Components",source:"@site/versioned_docs/version-1.0/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/docs/1.0/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/docs/1.0/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/docs/1.0/introduction/why_kubernetes"}},p={},u=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model training",id:"3-model-training",level:3},{value:"4. Model evaluation",id:"4-model-evaluation",level:3},{value:"5. Model serving",id:"5-model-serving",level:3},{value:"6. Online experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],s={toc:u},d="wrapper";function c(e){let{components:t,...l}=e;return(0,r.kt)(d,(0,i.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,r.kt)("p",null," 2021\ub144 5\uc6d4\uc5d0 \ubc1c\ud45c\ub41c \uad6c\uae00\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://services.google.com/fh/files/misc/practitioners_guide_to_mlops_whitepaper.pdf"},"white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning"),"\uc5d0\uc11c\ub294 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\ub4e4\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc744 \uc5b8\uae09\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlops-component",src:n(8037).Z,width:"2352",height:"1890"})),(0,r.kt)("p",null," \uac01 \uae30\ub2a5\uc774 \uc5b4\ub5a4 \uc5ed\ud560\uc744 \ud558\ub294\uc9c0 \uc0b4\ud3b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-experimentation"},"1. 
Experimentation"),(0,r.kt)("p",null," \uc2e4\ud5d8(Experimentation)\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub370\uc774\ud130\ub97c \ubd84\uc11d\ud558\uace0, \ud504\ub85c\ud1a0\ud0c0\uc785 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uba70 \ud559\uc2b5 \uae30\ub2a5\uc744 \uad6c\ud604\ud560 \uc218 \uc788\ub3c4\ub85d \ud558\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uae43(Git)\uacfc \uac19\uc740 \ubc84\uc804 \ucee8\ud2b8\ub864 \ub3c4\uad6c\uc640 \ud1b5\ud569\ub41c \ub178\ud2b8\ubd81(Jupyter Notebook) \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\ud55c \ub370\uc774\ud130, \ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130, \ud3c9\uac00 \uc9c0\ud45c\ub97c \ud3ec\ud568\ud55c \uc2e4\ud5d8 \ucd94\uc801 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc640 \ubaa8\ub378\uc5d0 \ub300\ud55c \ubd84\uc11d \ubc0f \uc2dc\uac01\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,r.kt)("p",null," \ub370\uc774\ud130 \ucc98\ub9ac(Data Processing)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378 \uac1c\ubc1c \ub2e8\uacc4, \uc9c0\uc18d\uc801\uc778 \ud559\uc2b5(Continuous Training) \ub2e8\uacc4, \uadf8\ub9ac\uace0 API \ubc30\ud3ec(API Deployment) \ub2e8\uacc4\uc5d0\uc11c \ub9ce\uc740 \uc591\uc758 \ub370\uc774\ud130\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ub370\uc774\ud130 \uc18c\uc2a4\uc640 \uc11c\ube44\uc2a4\uc5d0 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \ucee4\ub125\ud130(connector) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \uc778\ucf54\ub354(encoder) & \ub514\ucf54\ub354(decoder) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ub370\uc774\ud130 \ubcc0\ud658\uacfc \ud53c\ucc98 \uc5d4\uc9c0\ub2c8\uc5b4\ub9c1(feature engineering) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ud559\uc2b5\uacfc \uc11c\ube59\uc744 \uc704\ud55c \ud655\uc7a5 \uac00\ub2a5\ud55c \ubc30\uce58, \uc2a4\ud2b8\ub9bc \ub370\uc774\ud130 \ucc98\ub9ac \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"3-model-training"},"3. Model training"),(0,r.kt)("p",null," \ubaa8\ub378 \ud559\uc2b5(Model training)\uc740 \ubaa8\ub378 \ud559\uc2b5\uc744 \uc704\ud55c \uc54c\uace0\ub9ac\uc998\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc2e4\ud589\uc2dc\ucf1c\uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"ML \ud504\ub808\uc784\uc6cc\ud06c\uc758 \uc2e4\ud589\uc744 \uc704\ud55c \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc218\uc758 GPU / \ubd84\uc0b0 \ud559\uc2b5 \uc0ac\uc6a9\uc744 \uc704\ud55c \ubd84\uc0b0 \ud559\uc2b5 \ud658\uacbd \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130 \ud29c\ub2dd\uacfc \ucd5c\uc801\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"4-model-evaluation"},"4. 
Model evaluation"),(0,r.kt)("p",null," \ubaa8\ub378 \ud3c9\uac00(Model evaluation)\ub294 \uc2e4\ud5d8 \ud658\uacbd\uacfc \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uad00\ucc30\ud560 \uc218 \uc788\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ud3c9\uac00 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ubaa8\ub378 \uc131\ub2a5 \ud3c9\uac00 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc9c0\uc18d \ud559\uc2b5 \uc2e4\ud589 \uacb0\uacfc\uc5d0 \ub300\ud55c \uc608\uce21 \uc131\ub2a5 \ucd94\uc801"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \ubaa8\ub378\uc758 \uc131\ub2a5 \ube44\uad50\uc640 \uc2dc\uac01\ud654"),(0,r.kt)("li",{parentName:"ul"},"\ud574\uc11d\ud560 \uc218 \uc788\ub294 AI \uae30\uc220\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378 \ucd9c\ub825 \ud574\uc11d \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"5-model-serving"},"5. Model serving"),(0,r.kt)("p",null," \ubaa8\ub378 \uc11c\ube59(Model serving)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uc11c\ube59\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uc800 \uc9c0\uc5f0 \ucd94\ub860\uacfc \uace0\uac00\uc6a9\uc131 \ucd94\ub860 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c ML \ubaa8\ub378 \uc11c\ube59 \ud504\ub808\uc784\uc6cc\ud06c \uc9c0\uc6d0(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGGoost. etc)"),(0,r.kt)("li",{parentName:"ul"},"\ubcf5\uc7a1\ud55c \ud615\ud0dc\uc758 \ucd94\ub860 \ub8e8\ud2f4 \uae30\ub2a5 \uc81c\uacf5, \uc608\ub97c \ub4e4\uc5b4 \uc804\ucc98\ub9ac(preprocess) \ub610\ub294 \ud6c4\ucc98\ub9ac(postprocess) \uae30\ub2a5\uacfc \ucd5c\uc885 \uacb0\uacfc\ub97c \uc704\ud574 \ub2e4\uc218\uc758 \ubaa8\ub378\uc774 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\ub97c \ub9d0\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\uc21c\uac04\uc801\uc73c\ub85c \uce58\uc19f\ub294 \ucd94\ub860 \uc694\uccad\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud55c \uc624\ud1a0 \uc2a4\ucf00\uc77c\ub9c1(autoscaling) \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ucd94\ub860 \uc694\uccad\uacfc \ucd94\ub860 \uacb0\uacfc\uc5d0 \ub300\ud55c \ub85c\uae45 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"6-online-experimentation"},"6. Online experimentation"),(0,r.kt)("p",null," \uc628\ub77c\uc778 \uc2e4\ud5d8(Online experimentation)\uc740 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc774 \uc0dd\uc131\ub418\uc5c8\uc744 \ub54c, \uc774 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uba74 \uc5b4\ub290 \uc815\ub3c4\uc758 \uc131\ub2a5\uc744 \ubcf4\uc77c \uac83\uc778\uc9c0 \uac80\uc99d\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc774 \uae30\ub2a5\uc740 \uc0c8 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\ub294 \uac83\uae4c\uc9c0 \uc5f0\ub3d9\ud558\uae30 \uc704\ud574 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\uc640 \uc5f0\ub3d9\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\uce74\ub098\ub9ac(canary) & \uc100\ub3c4(shadow) \ubc30\ud3ec \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"A/B \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\uba40\ud2f0 \uc554\ub4dc \ubc34\ub527(Multi-armed bandit) \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5")),(0,r.kt)("h3",{id:"7-model-monitoring"},"7. 
Model Monitoring"),(0,r.kt)("p",null,"\ubaa8\ub378 \ubaa8\ub2c8\ud130\ub9c1(Model Monitoring)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \ubaa8\ub2c8\ud130\ub9c1\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4 \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \ub5a8\uc5b4\uc838 \uc5c5\ub370\uc774\ud2b8\uac00 \ud544\uc694\ud55c\uc9c0\uc5d0 \ub300\ud55c \uc815\ubcf4 \ub4f1\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,r.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ud30c\uc774\ud504\ub77c\uc778(ML Pipeline)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ubcf5\uc7a1\ud55c ML \ud559\uc2b5\uacfc \ucd94\ub860 \uc791\uc5c5\uc744 \uad6c\uc131\ud558\uace0 \uc81c\uc5b4\ud558\uace0 \uc790\ub3d9\ud654\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \uc774\ubca4\ud2b8\ub97c \uc18c\uc2a4\ub97c \ud1b5\ud55c \ud30c\uc774\ud504\ub77c\uc778 \uc2e4\ud589 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130\uc640 \uc0dd\uc131\ub418\ub294 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub97c \uc704\ud55c \uba38\uc2e0\ub7ec\ub2dd \uba54\ud0c0\ub370\uc774\ud130 \ucd94\uc801\uacfc \uc5f0\ub3d9 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc778 \uba38\uc2e0\ub7ec\ub2dd \uc791\uc5c5\uc744 \uc704\ud55c \ub0b4\uc7a5 \ucef4\ud3ec\ub10c\ud2b8 \uc9c0\uc6d0\uacfc \uc0ac\uc6a9\uc790\uac00 \uc9c1\uc811 \uad6c\ud604\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \uc9c0\uc6d0 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc2e4\ud589 \ud658\uacbd \uc81c\uacf5 \uae30\ub2a5")),(0,r.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,r.kt)("p",null," \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \uc0dd\uba85 \uc8fc\uae30(Lifecycle)\uc744 \uc911\uc559 \uc800\uc7a5\uc18c\uc5d0\uc11c \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378 \uadf8\ub9ac\uace0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ub300\ud55c \ub4f1\ub85d, \ucd94\uc801, \ubc84\uc800\ub2dd \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ubc30\ud3ec\ub97c \uc704\ud574 \ud544\uc694\ud55c \ub370\uc774\ud130\uc640 \ub7f0\ud0c0\uc784 \ud328\ud0a4\uc9c0\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4 \uc800\uc7a5 \uae30\ub2a5")),(0,r.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc5d0 \ub300\ud55c \uacf5\uc720, \uac80\uc0c9, \uc7ac\uc0ac\uc6a9 \uadf8\ub9ac\uace0 \ubc84\uc804 \uad00\ub9ac \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc774\ubca4\ud2b8 \uc2a4\ud2b8\ub9ac\ubc0d \ubc0f \uc628\ub77c\uc778 \ucd94\ub860 \uc791\uc5c5\uc5d0 \ub300\ud55c \uc2e4\uc2dc\uac04 \ucc98\ub9ac \ubc0f \uc800 \uc9c0\uc5f0 \uc11c\ube59 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc0ac\uc9c4, \ud14d\uc2a4\ud2b8, \ud14c\uc774\ube14 \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \uac19\uc740 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130 \uc9c0\uc6d0 \uae30\ub2a5")),(0,r.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. 
ML Metadata and Artifact Tracking"),(0,r.kt)("p",null," MLOps\uc758 \uac01 \ub2e8\uacc4\uc5d0\uc11c\ub294 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \uc0b0\ucd9c\ubb3c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. ML \uba54\ud0c0\ub370\uc774\ud130\ub294 \uc774\ub7f0 \uc0b0\ucd9c\ubb3c\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\nML \uba54\ud0c0\ub370\uc774\ud130\uc640 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub294 \uc0b0\ucd9c\ubb3c\uc758 \uc704\uce58, \ud0c0\uc785, \uc18d\uc131, \uadf8\ub9ac\uace0 \uad00\ub828\ub41c \uc2e4\ud5d8(experiment)\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uad00\ub9ac\ud558\uae30 \uc704\ud574 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \ud788\uc2a4\ud1a0\ub9ac \uad00\ub9ac \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"\uc2e4\ud5d8\uacfc \ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130 \uc124\uc815\uc5d0 \ub300\ud55c \ucd94\uc801, \uacf5\uc720 \uae30\ub2a5"),(0,r.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \uc800\uc7a5, \uc811\uadfc, \uc2dc\uac01\ud654, \ub2e4\uc6b4\ub85c\ub4dc \uae30\ub2a5 \uc81c\uacf5"),(0,r.kt)("li",{parentName:"ul"},"\ub2e4\ub978 MLOps \uae30\ub2a5\uacfc\uc758 \ud1b5\ud569 \uae30\ub2a5 \uc81c\uacf5")))}c.isMDXComponent=!0},8037:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file diff --git a/assets/js/5523074d.2f9c6695.js b/assets/js/5523074d.14a1622e.js similarity index 99% rename from assets/js/5523074d.2f9c6695.js rename to assets/js/5523074d.14a1622e.js index bdb52c8e..b1dfdbd4 100644 --- a/assets/js/5523074d.2f9c6695.js +++ b/assets/js/5523074d.14a1622e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4297],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),c=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(s.Provider,{value:t},e.children)},l="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),l=c(n),d=o,k=l["".concat(s,".").concat(d)]||l[d]||m[d]||a;return n?r.createElement(k,i(i({ref:t},u),{},{components:n})):r.createElement(k,i({ref:t},u))}));function k(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var p={};for(var s in t)hasOwnProperty.call(t,s)&&(p[s]=t[s]);p.originalType=e,p[l]="string"==typeof e?e:o,i[1]=p;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>p,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"4. 
Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,p={unversionedId:"introduction/why_kubernetes",id:"version-1.0/introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/versioned_docs/version-1.0/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/docs/1.0/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/why_kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/docs/1.0/introduction/component"},next:{title:"1. Introduction",permalink:"/docs/1.0/setup-kubernetes/intro"}},s={},c=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:c},l="wrapper";function m(e){let{components:t,...a}=e;return(0,o.kt)(l,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 MLOps\ub97c \uc774\uc57c\uae30\ud560 \ub54c, \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\ub77c\ub294 \ub2e8\uc5b4\uac00 \ud56d\uc0c1 \ud568\uaed8 \ub4e4\ub9ac\ub294 \uc774\uc720\uac00 \ubb34\uc5c7\uc77c\uae4c\uc694?"),(0,o.kt)("p",null,"\uc131\uacf5\uc801\uc778 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\ucd95\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c")," \uc5d0\uc11c \uc124\uba85\ud55c \uac83\ucc98\ub7fc \ub2e4\uc591\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \ud544\uc694\ud558\uc9c0\ub9cc, \uac01\uac01\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \uc720\uae30\uc801\uc73c\ub85c \uc6b4\uc601\ub418\uae30 \uc704\ud574\uc11c\ub294 \uc778\ud504\ub77c \ub808\ubca8\uc5d0\uc11c \uc218\ub9ce\uc740 \uc774\uc288\ub97c \ud574\uacb0\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uac04\ub2e8\ud558\uac8c\ub294 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud559\uc2b5 \uc694\uccad\uc744 \ucc28\ub840\ub300\ub85c \uc2e4\ud589\ud558\ub294 \uac83, \ub2e4\ub978 \uc791\uc5c5 \uacf5\uac04\uc5d0\uc11c\ub3c4 \uac19\uc740 \uc2e4\ud589 \ud658\uacbd\uc744 \ubcf4\uc7a5\ud574\uc57c \ud558\ub294 \uac83, \ubc30\ud3ec\ub41c \uc11c\ube44\uc2a4\uc5d0 \uc7a5\uc560\uac00 \uc0dd\uacbc\uc744 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud558\ub294 \uac83 \ub4f1\uc758 \uc774\uc288 \ub4f1\uc744 \uc0dd\uac01\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c \ucee8\ud14c\uc774\ub108(Container)\uc640 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c(Container Orchestration System)\uc758 \ud544\uc694\uc131\uc774 \ub4f1\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108 
\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud558\uba74 \uc2e4\ud589 \ud658\uacbd\uc758 \uaca9\ub9ac\uc640 \uad00\ub9ac\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud55c\ub2e4\uba74, \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uace0 \ubc30\ud3ec\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub2e4\uc218\uc758 \uac1c\ubc1c\uc790\uac00 \uc18c\uc218\uc758 \ud074\ub7ec\uc2a4\ud130\ub97c \uacf5\uc720\ud558\uba74\uc11c ",(0,o.kt)("em",{parentName:"p"},"'1\ubc88 \ud074\ub7ec\uc2a4\ud130 \uc0ac\uc6a9 \uc911\uc774\uc2e0\uac00\uc694?', 'GPU \uc0ac\uc6a9 \uc911\uc774\ub358 \uc81c \ud504\ub85c\uc138\uc2a4 \ub204\uac00 \uc8fd\uc600\ub098\uc694?', '\ub204\uac00 \ud074\ub7ec\uc2a4\ud130\uc5d0 x \ud328\ud0a4\uc9c0 \uc5c5\ub370\uc774\ud2b8\ud588\ub098\uc694?'")," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108\ub780 \ubb34\uc5c7\uc77c\uae4c\uc694? \ub9c8\uc774\ud06c\ub85c\uc18c\ud504\ud2b8\uc5d0\uc11c\ub294 \ucee8\ud14c\uc774\ub108\ub97c ",(0,o.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/ko-kr/overview/what-is-a-container/"},"\ub2e4\uc74c"),"\uacfc \uac19\uc774 \uc815\uc758\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"\ucee8\ud14c\uc774\ub108\ub780 : \uc560\ud50c\ub9ac\ucf00\uc774\uc158\uc758 \ud45c\uc900\ud654\ub41c \uc774\uc2dd \uac00\ub2a5\ud55c \ud328\ud0a4\uc9d5")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc65c \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ucee8\ud14c\uc774\ub108\uac00 \ud544\uc694\ud560\uae4c\uc694? 
\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc6b4\uc601\uccb4\uc81c\ub098 Python \uc2e4\ud589 \ud658\uacbd, \ud328\ud0a4\uc9c0 \ubc84\uc804 \ub4f1\uc5d0 \ub530\ub77c \ub2e4\ub974\uac8c \ub3d9\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \ubc29\uc9c0\ud558\uae30 \uc704\ud574\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc5d0 \uc0ac\uc6a9\ub41c \uc18c\uc2a4 \ucf54\ub4dc\uc640 \ud568\uaed8 \uc885\uc18d\uc801\uc778 \uc2e4\ud589 \ud658\uacbd \uc804\uccb4\ub97c ",(0,o.kt)("strong",{parentName:"p"},"\ud558\ub098\ub85c \ubb36\uc5b4\uc11c(\ud328\ud0a4\uc9d5\ud574\uc11c)")," \uacf5\uc720\ud558\uace0 \uc2e4\ud589\ud558\ub294 \ub370 \ud65c\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\uc220\uc785\ub2c8\ub2e4.\n\uc774\ub807\uac8c \ud328\ud0a4\uc9d5\ub41c \ud615\ud0dc\ub97c \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub974\uba70, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud568\uc73c\ub85c\uc368 \uc0ac\uc6a9\uc790\ub4e4\uc740 \uc5b4\ub5a4 \uc2dc\uc2a4\ud15c\uc5d0\uc11c\ub4e0 \uac19\uc740 \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud560 \uc218 \uc788\uac8c \ub429\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc989, \ub2e8\uc21c\ud788 Jupyter Notebook \ud30c\uc77c\uc774\ub098, \ubaa8\ub378\uc758 \uc18c\uc2a4 \ucf54\ub4dc\uc640 requirements.txt \ud30c\uc77c\uc744 \uacf5\uc720\ud558\ub294 \uac83\uc774 \uc544\ub2cc, \ubaa8\ub4e0 \uc2e4\ud589 \ud658\uacbd\uc774 \ub2f4\uae34 \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud55c\ub2e4\uba74 ",(0,o.kt)("em",{parentName:"p"},'"\uc81c \ub178\ud2b8\ubd81\uc5d0\uc11c\ub294 \uc798 \ub418\ub294\ub370\uc694?"')," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ud53c\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,'\ucee8\ud14c\uc774\ub108\ub97c \ucc98\uc74c \uc811\ud558\uc2dc\ub294 \ubd84\ub4e4\uc774 \ud754\ud788 \ud558\uc2dc\ub294 \uc624\ud574 \uc911 \ud558\ub098\ub294 "',(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108 == \ub3c4\ucee4"),'"\ub77c\uace0 \ubc1b\uc544\ub4e4\uc774\ub294 \uac83\uc785\ub2c8\ub2e4.',(0,o.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\ub294 \ucee8\ud14c\uc774\ub108\uc640 \uac19\uc740 \uc758\ubbf8\ub97c \uc9c0\ub2c8\ub294 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c, \ucee8\ud14c\uc774\ub108\ub97c \ub744\uc6b0\uac70\ub098, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4\uace0 \uacf5\uc720\ud558\ub294 \uac83\uacfc \uac19\uc774 \ucee8\ud14c\uc774\ub108\ub97c \ub354\uc6b1\ub354 \uc27d\uace0 \uc720\uc5f0\ud558\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc815\ub9ac\ud558\uc790\uba74 \ucee8\ud14c\uc774\ub108\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc774\uace0, \ub3c4\ucee4\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc758 \uad6c\ud604\uccb4\ub77c\uace0 \ub9d0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, \ub3c4\ucee4\ub294 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \ub3c4\uad6c \uc911\uc5d0\uc11c \uc26c\uc6b4 \uc0ac\uc6a9\uc131\uacfc \ub192\uc740 \ud6a8\uc728\uc131\uc744 \ubc14\ud0d5\uc73c\ub85c \uac00\uc7a5 \ube60\ub974\uac8c \uc131\uc7a5\ud558\uc5ec \ub300\uc138\uac00 \ub418\uc5c8\uae30\uc5d0 \ucee8\ud14c\uc774\ub108\ud558\uba74 \ub3c4\ucee4\ub77c\ub294 \uc774\ubbf8\uc9c0\uac00 \uc790\ub3d9\uc73c\ub85c \ub5a0\uc624\ub974\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. 
\uc774\ub807\uac8c \ucee8\ud14c\uc774\ub108\uc640 \ub3c4\ucee4 \uc0dd\ud0dc\uacc4\uac00 \ub300\uc138\uac00 \ub418\uae30\uae4c\uc9c0\ub294 \ub2e4\uc591\ud55c \uc774\uc720\uac00 \uc788\uc9c0\ub9cc, \uae30\uc220\uc801\uc73c\ub85c \uc790\uc138\ud55c \uc774\uc57c\uae30\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ubc94\uc704\ub97c \ub118\uc5b4\uc11c\uae30 \ub54c\ubb38\uc5d0 \ub2e4\ub8e8\uc9c0\ub294 \uc54a\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ud639\uc740 \ub3c4\ucee4\ub97c \ucc98\uc74c \ub4e4\uc5b4\ubcf4\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ub0b4\uc6a9\uc774 \ub2e4\uc18c \uc5b4\ub835\uac8c \ub290\uaef4\uc9c8 \uc218 \uc788\uc73c\ubbc0\ub85c, ",(0,o.kt)("a",{parentName:"p",href:"https://opentutorials.org/course/4781"},"\uc0dd\ud65c\ucf54\ub529"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/2017/01/19/docker-guide-for-beginners-1.html"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc790\ub8cc\ub97c \uba3c\uc800 \uc0b4\ud3b4\ubcf4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694? ",(0,o.kt)("strong",{parentName:"p"},"\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158"),"\uc774\ub77c\ub294 \ub2e8\uc5b4\uc5d0\uc11c \ucd94\uce21\ud574 \ubcfc \uc218 \uc788\ub4ef\uc774, \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc744 \ub54c \ucee8\ud14c\uc774\ub108\ub4e4\uc774 \uc11c\ub85c \uc870\ud654\ub86d\uac8c \uad6c\ub3d9\ub420 \uc218 \uc788\ub3c4\ub85d \uc9c0\ud718\ud558\ub294 \uc2dc\uc2a4\ud15c\uc5d0 \ube44\uc720\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \uae30\ubc18\uc758 \uc2dc\uc2a4\ud15c\uc5d0\uc11c \uc11c\ube44\uc2a4\ub294 \ucee8\ud14c\uc774\ub108\uc758 \ud615\ud0dc\ub85c \uc0ac\uc6a9\uc790\ub4e4\uc5d0\uac8c \uc81c\uacf5\ub429\ub2c8\ub2e4. 
\uc774\ub54c \uad00\ub9ac\ud574\uc57c \ud560 \ucee8\ud14c\uc774\ub108\uc758 \uc218\uac00 \uc801\ub2e4\uba74 \uc6b4\uc601 \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774\uc11c\ub3c4 \ucda9\ubd84\ud788 \ubaa8\ub4e0 \uc0c1\ud669\uc5d0 \ub300\uc751\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc, \uc218\ubc31 \uac1c \uc774\uc0c1\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc218 \uc2ed \ub300 \uc774\uc0c1\uc758 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uad6c\ub3d9\ub418\uace0 \uc788\uace0 \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\uc9c0 \uc54a\uace0 \ud56d\uc0c1 \uc815\uc0c1 \ub3d9\uc791\ud574\uc57c \ud55c\ub2e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uc758 \uc815\uc0c1 \ub3d9\uc791 \uc5ec\ubd80\ub97c \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774 \ud30c\uc545\ud558\uace0 \uc774\uc288\uc5d0 \ub300\uc751\ud558\ub294 \uac83\uc740 \ubd88\uac00\ub2a5\uc5d0 \uac00\uae5d\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \ubaa8\ub2c8\ud130\ub9c1(Monitoring)\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud2b9\uc815 \uc11c\ube44\uc2a4\uac00 \uc7a5\uc560\ub97c \uc77c\uc73c\ucf30\ub2e4\uba74 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108\uc758 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uac00\uba70 \ubb38\uc81c\ub97c \ud30c\uc545\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub610\ud55c, \ud2b9\uc815 \ud074\ub7ec\uc2a4\ud130\ub098 \ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uc5d0 \uc791\uc5c5\uc774 \ubab0\ub9ac\uc9c0 \uc54a\ub3c4\ub85d \uc2a4\ucf00\uc904\ub9c1(Scheduling)\ud558\uace0 \ub85c\ub4dc \ubc38\ub7f0\uc2f1(Load Balancing)\ud558\uba70, \uc2a4\ucf00\uc77c\ub9c1(Scaling)\ud558\ub294 \ub4f1\uc758 \uc218\ub9ce\uc740 \uc791\uc5c5\uc744 \ub2f4\ub2f9\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uc774\ub807\uac8c \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ub9ac\ud558\uace0 \uc6b4\uc601\ud558\ub294 \uacfc\uc815\uc744 \uc870\uae08\uc774\ub098\ub9c8 \uc27d\uac8c, \uc790\ub3d9\uc73c\ub85c \ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uac00 \ubc14\ub85c \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc785\ub2c8\ub2e4. 
"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c\ub294 \uc5b4\ub5bb\uac8c \uc4f0\uc77c \uc218 \uc788\uc744\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c GPU\uac00 \uc788\uc5b4\uc57c \ud558\ub294 \ub525\ub7ec\ub2dd \ud559\uc2b5 \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc0ac\uc6a9 \uac00\ub2a5\ud55c GPU\uac00 \uc788\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ub9ce\uc740 \uba54\ubaa8\ub9ac\ub97c \ud544\uc694\ub85c \ud558\ub294 \ub370\uc774\ud130 \uc804\ucc98\ub9ac \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uba54\ubaa8\ub9ac\uc758 \uc5ec\uc720\uac00 \ub9ce\uc740 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ud559\uc2b5 \uc911\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubb38\uc81c\uac00 \uc0dd\uae30\uba74 \uc790\ub3d9\uc73c\ub85c \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \ub2e4\ub978 \ud074\ub7ec\uc2a4\ud130\ub85c \uc774\ub3d9\uc2dc\ud0a4\uace0 \ub2e4\uc2dc \ud559\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 \ub4f1\uc758 \uc791\uc5c5\uc744 \uc0ac\ub78c\uc774 \uc77c\uc77c\uc774 \uc218\ud589\ud558\uc9c0 \uc54a\uace0, \uc790\ub3d9\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \uc2dc\uc2a4\ud15c\uc744 \uac1c\ubc1c\ud55c \ub4a4 \ub9e1\uae30\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc9d1\ud544\uc744 \ud558\ub294 2022\ub144\uc744 \uae30\uc900\uc73c\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc758 \uc0ac\uc2e4\uc0c1\uc758 \ud45c\uc900(De facto standard)\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"CNCF\uc5d0\uc11c 2018\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"Survey")," \uc5d0 \ub530\ub974\uba74 \ub2e4\uc74c \uadf8\ub9bc\uacfc \uac19\uc774 \uc774\ubbf8 \ub450\uac01\uc744 \ub098\ud0c0\ub0b4\uace0 \uc788\uc5c8\uc73c\uba70, 2019\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"Survey"),"\uc5d0 \ub530\ub974\uba74 \uadf8\uc911 78%\uac00 \uc0c1\uc6a9 \uc218\uc900(Production Level)\uc5d0\uc11c \uc0ac\uc6a9\ud558\uace0 \uc788\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"k8s-graph",src:n(2745).Z,width:"2048",height:"1317"})),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0dd\ud0dc\uacc4\uac00 \uc774\ucc98\ub7fc \ucee4\uc9c0\uac8c \ub41c \uc774\uc720\uc5d0\ub294 \uc5ec\ub7ec \uac00\uc9c0 \uc774\uc720\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \ub3c4\ucee4\uc640 \ub9c8\ucc2c\uac00\uc9c0\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc5ed\uc2dc \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc11c\ube44\uc2a4\uc5d0\uc11c\ub9cc \uc0ac\uc6a9\ud558\ub294 \uae30\uc220\uc774 \uc544\ub2c8\uae30\uc5d0, \uc790\uc138\ud788 \ub2e4\ub8e8\uae30\uc5d0\ub294 \uc0c1\ub2f9\ud788 \ub9ce\uc740 \uc591\uc758 \uae30\uc220\uc801\uc778 \ub0b4\uc6a9\uc744 \ub2e4\ub8e8\uc5b4\uc57c \ud558\ubbc0\ub85c \uc774\ubc88 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc0dd\ub7b5\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc55e\uc73c\ub85c \ub2e4\ub8f0 \ub0b4\uc6a9\uc740 \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud55c \ub0b4\uc6a9\uc744 \uc5b4\ub290 \uc815\ub3c4 \uc54c\uace0 \uacc4\uc2e0 \ubd84\ub4e4\uc744 \ub300\uc0c1\uc73c\ub85c \uc791\uc131\ud558\uc600\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud574 \uc775\uc219\ud558\uc9c0 \uc54a\uc73c\uc2e0 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/overview/what-is-kubernetes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uacf5\uc2dd \ubb38\uc11c"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/k8s/"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc27d\uace0 \uc790\uc138\ud55c \uc790\ub8cc\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\uc8fc\uc2dc\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0},2745:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4297],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),c=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(s.Provider,{value:t},e.children)},l="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),l=c(n),d=o,k=l["".concat(s,".").concat(d)]||l[d]||m[d]||a;return n?r.createElement(k,i(i({ref:t},u),{},{components:n})):r.createElement(k,i({ref:t},u))}));function k(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var p={};for(var s in t)hasOwnProperty.call(t,s)&&(p[s]=t[s]);p.originalType=e,p[l]="string"==typeof e?e:o,i[1]=p;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>p,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"4. 
Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,p={unversionedId:"introduction/why_kubernetes",id:"version-1.0/introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/versioned_docs/version-1.0/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/docs/1.0/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/why_kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/docs/1.0/introduction/component"},next:{title:"1. Introduction",permalink:"/docs/1.0/setup-kubernetes/intro"}},s={},c=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:c},l="wrapper";function m(e){let{components:t,...a}=e;return(0,o.kt)(l,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 MLOps\ub97c \uc774\uc57c\uae30\ud560 \ub54c, \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\ub77c\ub294 \ub2e8\uc5b4\uac00 \ud56d\uc0c1 \ud568\uaed8 \ub4e4\ub9ac\ub294 \uc774\uc720\uac00 \ubb34\uc5c7\uc77c\uae4c\uc694?"),(0,o.kt)("p",null,"\uc131\uacf5\uc801\uc778 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\ucd95\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c")," \uc5d0\uc11c \uc124\uba85\ud55c \uac83\ucc98\ub7fc \ub2e4\uc591\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \ud544\uc694\ud558\uc9c0\ub9cc, \uac01\uac01\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \uc720\uae30\uc801\uc73c\ub85c \uc6b4\uc601\ub418\uae30 \uc704\ud574\uc11c\ub294 \uc778\ud504\ub77c \ub808\ubca8\uc5d0\uc11c \uc218\ub9ce\uc740 \uc774\uc288\ub97c \ud574\uacb0\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uac04\ub2e8\ud558\uac8c\ub294 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud559\uc2b5 \uc694\uccad\uc744 \ucc28\ub840\ub300\ub85c \uc2e4\ud589\ud558\ub294 \uac83, \ub2e4\ub978 \uc791\uc5c5 \uacf5\uac04\uc5d0\uc11c\ub3c4 \uac19\uc740 \uc2e4\ud589 \ud658\uacbd\uc744 \ubcf4\uc7a5\ud574\uc57c \ud558\ub294 \uac83, \ubc30\ud3ec\ub41c \uc11c\ube44\uc2a4\uc5d0 \uc7a5\uc560\uac00 \uc0dd\uacbc\uc744 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud558\ub294 \uac83 \ub4f1\uc758 \uc774\uc288 \ub4f1\uc744 \uc0dd\uac01\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c \ucee8\ud14c\uc774\ub108(Container)\uc640 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c(Container Orchestration System)\uc758 \ud544\uc694\uc131\uc774 \ub4f1\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108 
\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud558\uba74 \uc2e4\ud589 \ud658\uacbd\uc758 \uaca9\ub9ac\uc640 \uad00\ub9ac\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud55c\ub2e4\uba74, \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uace0 \ubc30\ud3ec\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub2e4\uc218\uc758 \uac1c\ubc1c\uc790\uac00 \uc18c\uc218\uc758 \ud074\ub7ec\uc2a4\ud130\ub97c \uacf5\uc720\ud558\uba74\uc11c ",(0,o.kt)("em",{parentName:"p"},"'1\ubc88 \ud074\ub7ec\uc2a4\ud130 \uc0ac\uc6a9 \uc911\uc774\uc2e0\uac00\uc694?', 'GPU \uc0ac\uc6a9 \uc911\uc774\ub358 \uc81c \ud504\ub85c\uc138\uc2a4 \ub204\uac00 \uc8fd\uc600\ub098\uc694?', '\ub204\uac00 \ud074\ub7ec\uc2a4\ud130\uc5d0 x \ud328\ud0a4\uc9c0 \uc5c5\ub370\uc774\ud2b8\ud588\ub098\uc694?'")," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108\ub780 \ubb34\uc5c7\uc77c\uae4c\uc694? \ub9c8\uc774\ud06c\ub85c\uc18c\ud504\ud2b8\uc5d0\uc11c\ub294 \ucee8\ud14c\uc774\ub108\ub97c ",(0,o.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/ko-kr/overview/what-is-a-container/"},"\ub2e4\uc74c"),"\uacfc \uac19\uc774 \uc815\uc758\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"\ucee8\ud14c\uc774\ub108\ub780 : \uc560\ud50c\ub9ac\ucf00\uc774\uc158\uc758 \ud45c\uc900\ud654\ub41c \uc774\uc2dd \uac00\ub2a5\ud55c \ud328\ud0a4\uc9d5")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc65c \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ucee8\ud14c\uc774\ub108\uac00 \ud544\uc694\ud560\uae4c\uc694? 
\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc6b4\uc601\uccb4\uc81c\ub098 Python \uc2e4\ud589 \ud658\uacbd, \ud328\ud0a4\uc9c0 \ubc84\uc804 \ub4f1\uc5d0 \ub530\ub77c \ub2e4\ub974\uac8c \ub3d9\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \ubc29\uc9c0\ud558\uae30 \uc704\ud574\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc5d0 \uc0ac\uc6a9\ub41c \uc18c\uc2a4 \ucf54\ub4dc\uc640 \ud568\uaed8 \uc885\uc18d\uc801\uc778 \uc2e4\ud589 \ud658\uacbd \uc804\uccb4\ub97c ",(0,o.kt)("strong",{parentName:"p"},"\ud558\ub098\ub85c \ubb36\uc5b4\uc11c(\ud328\ud0a4\uc9d5\ud574\uc11c)")," \uacf5\uc720\ud558\uace0 \uc2e4\ud589\ud558\ub294 \ub370 \ud65c\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\uc220\uc785\ub2c8\ub2e4.\n\uc774\ub807\uac8c \ud328\ud0a4\uc9d5\ub41c \ud615\ud0dc\ub97c \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub974\uba70, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud568\uc73c\ub85c\uc368 \uc0ac\uc6a9\uc790\ub4e4\uc740 \uc5b4\ub5a4 \uc2dc\uc2a4\ud15c\uc5d0\uc11c\ub4e0 \uac19\uc740 \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud560 \uc218 \uc788\uac8c \ub429\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc989, \ub2e8\uc21c\ud788 Jupyter Notebook \ud30c\uc77c\uc774\ub098, \ubaa8\ub378\uc758 \uc18c\uc2a4 \ucf54\ub4dc\uc640 requirements.txt \ud30c\uc77c\uc744 \uacf5\uc720\ud558\ub294 \uac83\uc774 \uc544\ub2cc, \ubaa8\ub4e0 \uc2e4\ud589 \ud658\uacbd\uc774 \ub2f4\uae34 \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud55c\ub2e4\uba74 ",(0,o.kt)("em",{parentName:"p"},'"\uc81c \ub178\ud2b8\ubd81\uc5d0\uc11c\ub294 \uc798 \ub418\ub294\ub370\uc694?"')," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ud53c\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,'\ucee8\ud14c\uc774\ub108\ub97c \ucc98\uc74c \uc811\ud558\uc2dc\ub294 \ubd84\ub4e4\uc774 \ud754\ud788 \ud558\uc2dc\ub294 \uc624\ud574 \uc911 \ud558\ub098\ub294 "',(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108 == \ub3c4\ucee4"),'"\ub77c\uace0 \ubc1b\uc544\ub4e4\uc774\ub294 \uac83\uc785\ub2c8\ub2e4.',(0,o.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\ub294 \ucee8\ud14c\uc774\ub108\uc640 \uac19\uc740 \uc758\ubbf8\ub97c \uc9c0\ub2c8\ub294 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c, \ucee8\ud14c\uc774\ub108\ub97c \ub744\uc6b0\uac70\ub098, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4\uace0 \uacf5\uc720\ud558\ub294 \uac83\uacfc \uac19\uc774 \ucee8\ud14c\uc774\ub108\ub97c \ub354\uc6b1\ub354 \uc27d\uace0 \uc720\uc5f0\ud558\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc815\ub9ac\ud558\uc790\uba74 \ucee8\ud14c\uc774\ub108\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc774\uace0, \ub3c4\ucee4\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc758 \uad6c\ud604\uccb4\ub77c\uace0 \ub9d0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, \ub3c4\ucee4\ub294 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \ub3c4\uad6c \uc911\uc5d0\uc11c \uc26c\uc6b4 \uc0ac\uc6a9\uc131\uacfc \ub192\uc740 \ud6a8\uc728\uc131\uc744 \ubc14\ud0d5\uc73c\ub85c \uac00\uc7a5 \ube60\ub974\uac8c \uc131\uc7a5\ud558\uc5ec \ub300\uc138\uac00 \ub418\uc5c8\uae30\uc5d0 \ucee8\ud14c\uc774\ub108\ud558\uba74 \ub3c4\ucee4\ub77c\ub294 \uc774\ubbf8\uc9c0\uac00 \uc790\ub3d9\uc73c\ub85c \ub5a0\uc624\ub974\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. 
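The page embedded above argues that sharing a container image, rather than only a notebook and a requirements.txt, is what guarantees identical execution results on any system. A rough sketch of that workflow, assuming a local Docker daemon, a Dockerfile in the current directory, and a hypothetical iris-train:0.1 tag (none of these appear in the page itself), using the Docker SDK for Python:

```python
# Rough sketch only: packaging training code plus its environment into one image
# and running it, so the environment travels with the code.
# Assumes a reachable Docker daemon and a Dockerfile in the working directory;
# the tag name and command are hypothetical.
import docker

client = docker.from_env()

# Build an image that bundles the source code and all of its dependencies.
image, build_logs = client.images.build(path=".", tag="iris-train:0.1")

# Running the image reproduces the same environment anywhere it is pulled,
# instead of relying on whatever happens to be installed on a laptop.
output = client.containers.run("iris-train:0.1", command="python train.py", remove=True)
print(output.decode())
```

Anyone who pulls the same image runs the same environment, which is the point the page makes with the "제 노트북에서는 잘 되는데요?" example.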
\uc774\ub807\uac8c \ucee8\ud14c\uc774\ub108\uc640 \ub3c4\ucee4 \uc0dd\ud0dc\uacc4\uac00 \ub300\uc138\uac00 \ub418\uae30\uae4c\uc9c0\ub294 \ub2e4\uc591\ud55c \uc774\uc720\uac00 \uc788\uc9c0\ub9cc, \uae30\uc220\uc801\uc73c\ub85c \uc790\uc138\ud55c \uc774\uc57c\uae30\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ubc94\uc704\ub97c \ub118\uc5b4\uc11c\uae30 \ub54c\ubb38\uc5d0 \ub2e4\ub8e8\uc9c0\ub294 \uc54a\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ud639\uc740 \ub3c4\ucee4\ub97c \ucc98\uc74c \ub4e4\uc5b4\ubcf4\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ub0b4\uc6a9\uc774 \ub2e4\uc18c \uc5b4\ub835\uac8c \ub290\uaef4\uc9c8 \uc218 \uc788\uc73c\ubbc0\ub85c, ",(0,o.kt)("a",{parentName:"p",href:"https://opentutorials.org/course/4781"},"\uc0dd\ud65c\ucf54\ub529"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/2017/01/19/docker-guide-for-beginners-1.html"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc790\ub8cc\ub97c \uba3c\uc800 \uc0b4\ud3b4\ubcf4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694? ",(0,o.kt)("strong",{parentName:"p"},"\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158"),"\uc774\ub77c\ub294 \ub2e8\uc5b4\uc5d0\uc11c \ucd94\uce21\ud574 \ubcfc \uc218 \uc788\ub4ef\uc774, \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc744 \ub54c \ucee8\ud14c\uc774\ub108\ub4e4\uc774 \uc11c\ub85c \uc870\ud654\ub86d\uac8c \uad6c\ub3d9\ub420 \uc218 \uc788\ub3c4\ub85d \uc9c0\ud718\ud558\ub294 \uc2dc\uc2a4\ud15c\uc5d0 \ube44\uc720\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \uae30\ubc18\uc758 \uc2dc\uc2a4\ud15c\uc5d0\uc11c \uc11c\ube44\uc2a4\ub294 \ucee8\ud14c\uc774\ub108\uc758 \ud615\ud0dc\ub85c \uc0ac\uc6a9\uc790\ub4e4\uc5d0\uac8c \uc81c\uacf5\ub429\ub2c8\ub2e4. 
\uc774\ub54c \uad00\ub9ac\ud574\uc57c \ud560 \ucee8\ud14c\uc774\ub108\uc758 \uc218\uac00 \uc801\ub2e4\uba74 \uc6b4\uc601 \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774\uc11c\ub3c4 \ucda9\ubd84\ud788 \ubaa8\ub4e0 \uc0c1\ud669\uc5d0 \ub300\uc751\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc, \uc218\ubc31 \uac1c \uc774\uc0c1\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc218 \uc2ed \ub300 \uc774\uc0c1\uc758 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uad6c\ub3d9\ub418\uace0 \uc788\uace0 \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\uc9c0 \uc54a\uace0 \ud56d\uc0c1 \uc815\uc0c1 \ub3d9\uc791\ud574\uc57c \ud55c\ub2e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uc758 \uc815\uc0c1 \ub3d9\uc791 \uc5ec\ubd80\ub97c \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774 \ud30c\uc545\ud558\uace0 \uc774\uc288\uc5d0 \ub300\uc751\ud558\ub294 \uac83\uc740 \ubd88\uac00\ub2a5\uc5d0 \uac00\uae5d\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \ubaa8\ub2c8\ud130\ub9c1(Monitoring)\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud2b9\uc815 \uc11c\ube44\uc2a4\uac00 \uc7a5\uc560\ub97c \uc77c\uc73c\ucf30\ub2e4\uba74 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108\uc758 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uac00\uba70 \ubb38\uc81c\ub97c \ud30c\uc545\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub610\ud55c, \ud2b9\uc815 \ud074\ub7ec\uc2a4\ud130\ub098 \ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uc5d0 \uc791\uc5c5\uc774 \ubab0\ub9ac\uc9c0 \uc54a\ub3c4\ub85d \uc2a4\ucf00\uc904\ub9c1(Scheduling)\ud558\uace0 \ub85c\ub4dc \ubc38\ub7f0\uc2f1(Load Balancing)\ud558\uba70, \uc2a4\ucf00\uc77c\ub9c1(Scaling)\ud558\ub294 \ub4f1\uc758 \uc218\ub9ce\uc740 \uc791\uc5c5\uc744 \ub2f4\ub2f9\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uc774\ub807\uac8c \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ub9ac\ud558\uace0 \uc6b4\uc601\ud558\ub294 \uacfc\uc815\uc744 \uc870\uae08\uc774\ub098\ub9c8 \uc27d\uac8c, \uc790\ub3d9\uc73c\ub85c \ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uac00 \ubc14\ub85c \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc785\ub2c8\ub2e4. 
"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c\ub294 \uc5b4\ub5bb\uac8c \uc4f0\uc77c \uc218 \uc788\uc744\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c GPU\uac00 \uc788\uc5b4\uc57c \ud558\ub294 \ub525\ub7ec\ub2dd \ud559\uc2b5 \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc0ac\uc6a9 \uac00\ub2a5\ud55c GPU\uac00 \uc788\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ub9ce\uc740 \uba54\ubaa8\ub9ac\ub97c \ud544\uc694\ub85c \ud558\ub294 \ub370\uc774\ud130 \uc804\ucc98\ub9ac \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uba54\ubaa8\ub9ac\uc758 \uc5ec\uc720\uac00 \ub9ce\uc740 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ud559\uc2b5 \uc911\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubb38\uc81c\uac00 \uc0dd\uae30\uba74 \uc790\ub3d9\uc73c\ub85c \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \ub2e4\ub978 \ud074\ub7ec\uc2a4\ud130\ub85c \uc774\ub3d9\uc2dc\ud0a4\uace0 \ub2e4\uc2dc \ud559\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 \ub4f1\uc758 \uc791\uc5c5\uc744 \uc0ac\ub78c\uc774 \uc77c\uc77c\uc774 \uc218\ud589\ud558\uc9c0 \uc54a\uace0, \uc790\ub3d9\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \uc2dc\uc2a4\ud15c\uc744 \uac1c\ubc1c\ud55c \ub4a4 \ub9e1\uae30\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc9d1\ud544\uc744 \ud558\ub294 2022\ub144\uc744 \uae30\uc900\uc73c\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc758 \uc0ac\uc2e4\uc0c1\uc758 \ud45c\uc900(De facto standard)\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"CNCF\uc5d0\uc11c 2018\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"Survey")," \uc5d0 \ub530\ub974\uba74 \ub2e4\uc74c \uadf8\ub9bc\uacfc \uac19\uc774 \uc774\ubbf8 \ub450\uac01\uc744 \ub098\ud0c0\ub0b4\uace0 \uc788\uc5c8\uc73c\uba70, 2019\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"Survey"),"\uc5d0 \ub530\ub974\uba74 \uadf8\uc911 78%\uac00 \uc0c1\uc6a9 \uc218\uc900(Production Level)\uc5d0\uc11c \uc0ac\uc6a9\ud558\uace0 \uc788\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"k8s-graph",src:n(2745).Z,width:"2048",height:"1317"})),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0dd\ud0dc\uacc4\uac00 \uc774\ucc98\ub7fc \ucee4\uc9c0\uac8c \ub41c \uc774\uc720\uc5d0\ub294 \uc5ec\ub7ec \uac00\uc9c0 \uc774\uc720\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \ub3c4\ucee4\uc640 \ub9c8\ucc2c\uac00\uc9c0\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc5ed\uc2dc \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc11c\ube44\uc2a4\uc5d0\uc11c\ub9cc \uc0ac\uc6a9\ud558\ub294 \uae30\uc220\uc774 \uc544\ub2c8\uae30\uc5d0, \uc790\uc138\ud788 \ub2e4\ub8e8\uae30\uc5d0\ub294 \uc0c1\ub2f9\ud788 \ub9ce\uc740 \uc591\uc758 \uae30\uc220\uc801\uc778 \ub0b4\uc6a9\uc744 \ub2e4\ub8e8\uc5b4\uc57c \ud558\ubbc0\ub85c \uc774\ubc88 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc0dd\ub7b5\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc55e\uc73c\ub85c \ub2e4\ub8f0 \ub0b4\uc6a9\uc740 \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud55c \ub0b4\uc6a9\uc744 \uc5b4\ub290 \uc815\ub3c4 \uc54c\uace0 \uacc4\uc2e0 \ubd84\ub4e4\uc744 \ub300\uc0c1\uc73c\ub85c \uc791\uc131\ud558\uc600\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud574 \uc775\uc219\ud558\uc9c0 \uc54a\uc73c\uc2e0 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/overview/what-is-kubernetes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uacf5\uc2dd \ubb38\uc11c"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/k8s/"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc27d\uace0 \uc790\uc138\ud55c \uc790\ub8cc\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\uc8fc\uc2dc\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0},2745:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file diff --git a/assets/js/57b26f6a.a561200f.js b/assets/js/57b26f6a.f4f5173c.js similarity index 99% rename from assets/js/57b26f6a.a561200f.js rename to assets/js/57b26f6a.f4f5173c.js index 69ad58f7..a8092dc6 100644 --- a/assets/js/57b26f6a.a561200f.js +++ b/assets/js/57b26f6a.f4f5173c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8693],{3905:(t,e,n)=>{n.d(e,{Zo:()=>d,kt:()=>f});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),u=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},d=function(t){var e=u(t.components);return a.createElement(l.Provider,{value:e},t.children)},s="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},c=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,d=i(t,["components","mdxType","originalType","parentName"]),s=u(n),c=r,f=s["".concat(l,".").concat(c)]||s[c]||m[c]||o;return n?a.createElement(f,p(p({ref:e},d),{},{components:n})):a.createElement(f,p({ref:e},d))}));function f(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=c;var i={};for(var l in 
e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[s]="string"==typeof t?t:r,p[1]=i;for(var u=2;u{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},p=void 0,i={unversionedId:"kubeflow/advanced-component",id:"version-1.0/kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/docs/1.0/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/docs/1.0/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/docs/1.0/kubeflow/advanced-environment"}},l={},u=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule to use InputPath/OutputPath",id:"rule-to-use-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],d={toc:u},s="wrapper";function m(t){let{components:e,...n}=t;return(0,r.kt)(s,(0,a.Z)({},d,n,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," \uc608\uc2dc\ub85c \ub098\uc654\ub358 \ucf54\ub4dc\ub97c \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\uc544\ub798 \ucf54\ub4dc\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"\uc5d0\uc11c \uc0ac\uc6a9\ud588\ub358 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uacfc \ud568\uaed8 \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"../kubeflow/basic-component"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud560 \ub54c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0 \ub300\ud55c \ud0c0\uc785 \ud78c\ud2b8\ub97c \uc801\uc5b4\uc57c \ud55c\ub2e4\uace0 \uc124\uba85 \ud588\uc5c8\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d json\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8 \ud0c0\uc785\uc774 \uc544\ub2cc dataframe, model\uc640 \uac19\uc774 \ubcf5\uc7a1\ud55c \uac1d\uccb4\ub4e4\uc740 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?"),(0,r.kt)("p",null,"\ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\uac04\uc5d0 \uac12\uc744 \uc804\ub2ec\ud560 \ub54c, \uac1d\uccb4\ub97c \ubc18\ud658\ud574\ub3c4 \uadf8 \uac12\uc774 \ud638\uc2a4\ud2b8\uc758 \uba54\ubaa8\ub9ac\uc5d0 \uc800\uc7a5\ub418\uc5b4 \uc788\uc73c\ubbc0\ub85c \ub2e4\uc74c \ud568\uc218\uc5d0\uc11c\ub3c4 \uac19\uc740 \uac1d\uccb4\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc740 \uac01\uac01 \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \uc11c\ub85c \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \uac19\uc740 \uba54\ubaa8\ub9ac\ub97c \uacf5\uc720\ud558\uace0 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ubcf4\ud1b5\uc758 \ud30c\uc774\uc36c \ud568\uc218\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ubc29\uc2dd\uacfc \uac19\uc774 \uac1d\uccb4\ub97c \uc804\ub2ec\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \ub118\uaca8 \uc904 \uc218 \uc788\ub294 \uc815\ubcf4\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"json")," \uc73c\ub85c\ub9cc \uac00\ub2a5\ud569\ub2c8\ub2e4. \ub530\ub77c\uc11c Model\uc774\ub098 DataFrame\uacfc \uac19\uc774 json \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud560 \uc218 \uc5c6\ub294 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \ub2e4\ub978 \ubc29\ubc95\uc744 \ud1b5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 json-serializable \ud558\uc9c0 \uc54a\uc740 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \uba54\ubaa8\ub9ac \ub300\uc2e0 \ud30c\uc77c\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud55c \ub4a4, \uadf8 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc815\ubcf4\ub97c \uc804\ub2ec\ud569\ub2c8\ub2e4. \uc800\uc7a5\ub41c \ud30c\uc77c\uc758 \uacbd\ub85c\ub294 str\uc774\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \uc804\ub2ec\ud560 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 kubeflow\uc5d0\uc11c\ub294 minio\ub97c \uc774\uc6a9\ud574 \ud30c\uc77c\uc744 \uc800\uc7a5\ud558\ub294\ub370 \uc720\uc800\ub294 \uc2e4\ud589\uc744 \ud558\uae30 \uc804\uc5d0\ub294 \uac01 \ud30c\uc77c\uc758 \uacbd\ub85c\ub97c \uc54c \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
\uc774\ub97c \uc704\ud574\uc11c kubeflow\uc5d0\uc11c\ub294 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \ub9e4\uc9c1\uc744 \uc81c\uacf5\ud558\ub294\ub370 \ubc14\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \uc785\ub825 \uacbd\ub85c\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \ucd9c\ub825 \uacbd\ub85c\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()"),"\ub97c argument\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ub370\uc774\ud130\ub97c \ubc1b\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()"),"\uc744 argument\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ub807\uac8c \ub9cc\ub4e0 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc11c\ub85c \uc5f0\uacb0\uc744 \ud558\uba74 kubeflow\uc5d0\uc11c \ud544\uc694\ud55c \uacbd\ub85c\ub97c \uc790\ub3d9\uc73c\ub85c \uc0dd\uc131\ud6c4 \uc785\ub825\ud574 \uc8fc\uae30 \ub54c\ubb38\uc5d0 \ub354 \uc774\uc0c1 \uc720\uc800\ub294 \uacbd\ub85c\ub97c \uc2e0\uacbd\uc4f0\uc9c0 \uc54a\uace0 \ucef4\ud3ec\ub10c\ud2b8\uac04\uc758 \uad00\uacc4\ub9cc \uc2e0\uacbd\uc4f0\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc774 \ub0b4\uc6a9\uc744 \ubc14\ud0d5\uc73c\ub85c \ub2e4\uc2dc \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"InputPath\ub098 OutputPath\ub294 string\uc744 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 string\uc740 \uc785\ub825 \ub610\ub294 \ucd9c\ub825\ud558\ub824\uace0 \ud558\ub294 \ud30c\uc77c\uc758 \ud3ec\ub9f7\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\ub2e4\uace0 \uaf2d \uc774 \ud3ec\ub9f7\uc73c\ub85c \ud30c\uc77c \ud615\ud0dc\ub85c \uc800\uc7a5\uc774 \uac15\uc81c\ub418\ub294 \uac83\uc740 \uc544\ub2d9\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \ucef4\ud30c\uc77c\ud560 \ub54c \ucd5c\uc18c\ud55c\uc758 \ud0c0\uc785 \uccb4\ud06c\ub97c \uc704\ud55c \ub3c4\uc6b0\ubbf8 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ud30c\uc77c \ud3ec\ub9f7\uc774 \uace0\uc815\ub418\uc9c0 \uc54a\ub294\ub2e4\uba74 \uc785\ub825\ud558\uc9c0 \uc54a\uc73c\uba74 \ub429\ub2c8\ub2e4 (\ud0c0\uc785 \ud78c\ud2b8 \uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"Any")," \uc640 \uac19\uc740 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4)."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"\uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"rule-to-use-inputpathoutputpath"},"Rule to use InputPath/OutputPath"),(0,r.kt)("p",null,"InputPath\ub098 OutputPath argument\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud560 \ub54c \uc9c0\ucf1c\uc57c\ud558\ub294 \uaddc\uce59\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub370\uc774\ud130\uac00 \ud544\uc694\ud558\ubbc0\ub85c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,r.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: 
str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,r.kt)("p",null,"\ud55c \uac00\uc9c0 \uc774\uc0c1\ud55c \uc810\uc744 \ud655\uc778\ud558\uc168\ub098\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\ubc14\ub85c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0\uc11c \ubc1b\ub294 argument\uc911 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \uac83\ub4e4\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]')," \uac00 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]')," \uc73c\ub85c \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 kubeflow\uc5d0\uc11c \uc815\ud55c \ubc95\uce59\uc73c\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc73c\ub85c \uc0dd\uc131\ub41c \uacbd\ub85c\ub4e4\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc811\uadfc\ud560 \ub54c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\ub97c \uc0dd\ub7b5\ud558\uc5ec \uc811\uadfc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\ub9cc \ubc29\uae08 \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uacbd\uc6b0 \uc2e4\ud589\uc774 \ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc774\uc720\ub294 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8693],{3905:(t,e,n)=>{n.d(e,{Zo:()=>d,kt:()=>f});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),u=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},d=function(t){var e=u(t.components);return a.createElement(l.Provider,{value:e},t.children)},s="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},c=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,d=i(t,["components","mdxType","originalType","parentName"]),s=u(n),c=r,f=s["".concat(l,".").concat(c)]||s[c]||m[c]||o;return n?a.createElement(f,p(p({ref:e},d),{},{components:n})):a.createElement(f,p({ref:e},d))}));function f(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=c;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[s]="string"==typeof t?t:r,p[1]=i;for(var u=2;u{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"8. 
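The embedded Kubeflow page ends by warning that this pipeline will not run yet when uploaded, with the reason deferred to its next page. The step it stops short of showing, turning the pipeline function into an uploadable package, would look roughly like this with the kfp v1 SDK used throughout the page; complex_pipeline is assumed to be the function defined above, and the output filename is arbitrary:

```python
# Rough sketch: compiling the complex_pipeline function from the page above into a
# package that can be uploaded through the Kubeflow Pipelines UI or client.
# Assumes complex_pipeline (and the components it wires together) are in scope
# exactly as written on the page; the output filename is arbitrary. As the page
# notes, the uploaded pipeline will still fail to run at this point.
from kfp import compiler

compiler.Compiler().compile(complex_pipeline, "complex_pipeline.yaml")
```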
Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},p=void 0,i={unversionedId:"kubeflow/advanced-component",id:"version-1.0/kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/docs/1.0/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/docs/1.0/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/docs/1.0/kubeflow/advanced-environment"}},l={},u=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule to use InputPath/OutputPath",id:"rule-to-use-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],d={toc:u},s="wrapper";function m(t){let{components:e,...n}=t;return(0,r.kt)(s,(0,a.Z)({},d,n,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," \uc608\uc2dc\ub85c \ub098\uc654\ub358 \ucf54\ub4dc\ub97c \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\uc544\ub798 \ucf54\ub4dc\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"\uc5d0\uc11c \uc0ac\uc6a9\ud588\ub358 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uacfc \ud568\uaed8 \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = 
pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"../kubeflow/basic-component"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud560 \ub54c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0 \ub300\ud55c \ud0c0\uc785 \ud78c\ud2b8\ub97c \uc801\uc5b4\uc57c \ud55c\ub2e4\uace0 \uc124\uba85 \ud588\uc5c8\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d json\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8 \ud0c0\uc785\uc774 \uc544\ub2cc dataframe, model\uc640 \uac19\uc774 \ubcf5\uc7a1\ud55c \uac1d\uccb4\ub4e4\uc740 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?"),(0,r.kt)("p",null,"\ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\uac04\uc5d0 \uac12\uc744 \uc804\ub2ec\ud560 \ub54c, \uac1d\uccb4\ub97c \ubc18\ud658\ud574\ub3c4 \uadf8 \uac12\uc774 \ud638\uc2a4\ud2b8\uc758 \uba54\ubaa8\ub9ac\uc5d0 \uc800\uc7a5\ub418\uc5b4 \uc788\uc73c\ubbc0\ub85c \ub2e4\uc74c \ud568\uc218\uc5d0\uc11c\ub3c4 \uac19\uc740 \uac1d\uccb4\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc740 \uac01\uac01 \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \uc11c\ub85c \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \uac19\uc740 \uba54\ubaa8\ub9ac\ub97c \uacf5\uc720\ud558\uace0 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ubcf4\ud1b5\uc758 \ud30c\uc774\uc36c \ud568\uc218\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ubc29\uc2dd\uacfc \uac19\uc774 \uac1d\uccb4\ub97c \uc804\ub2ec\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \ub118\uaca8 \uc904 \uc218 \uc788\ub294 \uc815\ubcf4\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"json")," \uc73c\ub85c\ub9cc \uac00\ub2a5\ud569\ub2c8\ub2e4. \ub530\ub77c\uc11c Model\uc774\ub098 DataFrame\uacfc \uac19\uc774 json \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud560 \uc218 \uc5c6\ub294 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \ub2e4\ub978 \ubc29\ubc95\uc744 \ud1b5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 json-serializable \ud558\uc9c0 \uc54a\uc740 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \uba54\ubaa8\ub9ac \ub300\uc2e0 \ud30c\uc77c\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud55c \ub4a4, \uadf8 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc815\ubcf4\ub97c \uc804\ub2ec\ud569\ub2c8\ub2e4. \uc800\uc7a5\ub41c \ud30c\uc77c\uc758 \uacbd\ub85c\ub294 str\uc774\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \uc804\ub2ec\ud560 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 kubeflow\uc5d0\uc11c\ub294 minio\ub97c \uc774\uc6a9\ud574 \ud30c\uc77c\uc744 \uc800\uc7a5\ud558\ub294\ub370 \uc720\uc800\ub294 \uc2e4\ud589\uc744 \ud558\uae30 \uc804\uc5d0\ub294 \uac01 \ud30c\uc77c\uc758 \uacbd\ub85c\ub97c \uc54c \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
\uc774\ub97c \uc704\ud574\uc11c kubeflow\uc5d0\uc11c\ub294 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \ub9e4\uc9c1\uc744 \uc81c\uacf5\ud558\ub294\ub370 \ubc14\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \uc785\ub825 \uacbd\ub85c\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \ucd9c\ub825 \uacbd\ub85c\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()"),"\ub97c argument\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ub370\uc774\ud130\ub97c \ubc1b\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()"),"\uc744 argument\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ub807\uac8c \ub9cc\ub4e0 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc11c\ub85c \uc5f0\uacb0\uc744 \ud558\uba74 kubeflow\uc5d0\uc11c \ud544\uc694\ud55c \uacbd\ub85c\ub97c \uc790\ub3d9\uc73c\ub85c \uc0dd\uc131\ud6c4 \uc785\ub825\ud574 \uc8fc\uae30 \ub54c\ubb38\uc5d0 \ub354 \uc774\uc0c1 \uc720\uc800\ub294 \uacbd\ub85c\ub97c \uc2e0\uacbd\uc4f0\uc9c0 \uc54a\uace0 \ucef4\ud3ec\ub10c\ud2b8\uac04\uc758 \uad00\uacc4\ub9cc \uc2e0\uacbd\uc4f0\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc774 \ub0b4\uc6a9\uc744 \ubc14\ud0d5\uc73c\ub85c \ub2e4\uc2dc \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"InputPath\ub098 OutputPath\ub294 string\uc744 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 string\uc740 \uc785\ub825 \ub610\ub294 \ucd9c\ub825\ud558\ub824\uace0 \ud558\ub294 \ud30c\uc77c\uc758 \ud3ec\ub9f7\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\ub2e4\uace0 \uaf2d \uc774 \ud3ec\ub9f7\uc73c\ub85c \ud30c\uc77c \ud615\ud0dc\ub85c \uc800\uc7a5\uc774 \uac15\uc81c\ub418\ub294 \uac83\uc740 \uc544\ub2d9\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \ucef4\ud30c\uc77c\ud560 \ub54c \ucd5c\uc18c\ud55c\uc758 \ud0c0\uc785 \uccb4\ud06c\ub97c \uc704\ud55c \ub3c4\uc6b0\ubbf8 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ud30c\uc77c \ud3ec\ub9f7\uc774 \uace0\uc815\ub418\uc9c0 \uc54a\ub294\ub2e4\uba74 \uc785\ub825\ud558\uc9c0 \uc54a\uc73c\uba74 \ub429\ub2c8\ub2e4 (\ud0c0\uc785 \ud78c\ud2b8 \uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"Any")," \uc640 \uac19\uc740 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4)."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"\uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"rule-to-use-inputpathoutputpath"},"Rule to use InputPath/OutputPath"),(0,r.kt)("p",null,"InputPath\ub098 OutputPath argument\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud560 \ub54c \uc9c0\ucf1c\uc57c\ud558\ub294 \uaddc\uce59\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub370\uc774\ud130\uac00 \ud544\uc694\ud558\ubbc0\ub85c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,r.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: 
str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,r.kt)("p",null,"\ud55c \uac00\uc9c0 \uc774\uc0c1\ud55c \uc810\uc744 \ud655\uc778\ud558\uc168\ub098\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\ubc14\ub85c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0\uc11c \ubc1b\ub294 argument\uc911 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \uac83\ub4e4\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]')," \uac00 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]')," \uc73c\ub85c \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 kubeflow\uc5d0\uc11c \uc815\ud55c \ubc95\uce59\uc73c\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc73c\ub85c \uc0dd\uc131\ub41c \uacbd\ub85c\ub4e4\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc811\uadfc\ud560 \ub54c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\ub97c \uc0dd\ub7b5\ud558\uc5ec \uc811\uadfc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\ub9cc \ubc29\uae08 \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uacbd\uc6b0 \uc2e4\ud589\uc774 \ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc774\uc720\ub294 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/5b3cd5ae.1cea74eb.js b/assets/js/5b3cd5ae.08e33229.js similarity index 99% rename from assets/js/5b3cd5ae.1cea74eb.js rename to assets/js/5b3cd5ae.08e33229.js index 7e228bec..ea4080ce 100644 --- a/assets/js/5b3cd5ae.1cea74eb.js +++ b/assets/js/5b3cd5ae.08e33229.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2375],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var p=a.createContext({}),m=function(e){var n=a.useContext(p),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},s=function(e){var n=m(e.components);return a.createElement(p.Provider,{value:n},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),d=m(t),c=r,k=d["".concat(p,".").concat(c)]||d[c]||u[c]||l;return t?a.createElement(k,o(o({ref:n},s),{},{components:t})):a.createElement(k,o({ref:n},s))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=t.length,o=new Array(l);o[0]=c;var i={};for(var p in 
n)hasOwnProperty.call(n,p)&&(i[p]=n[p]);i.originalType=e,i[d]="string"==typeof e?e:r,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>m});var a=t(7462),r=(t(7294),t(3905));const l={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/docs/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/docs/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-fields.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/docs/api-deployment/seldon-pg"},next:{title:"5. Model from MLflow",permalink:"/docs/api-deployment/seldon-mlflow"}},p={},m=[{value:"How Seldon Core works?",id:"how-seldon-core-works",level:2},{value:"SeldonDeployment Spec",id:"seldondeployment-spec",level:2},{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"initContainer",id:"initcontainer",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],s={toc:m},d="wrapper";function u(e){let{components:n,...l}=e;return(0,r.kt)(d,(0,a.Z)({},s,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"how-seldon-core-works"},"How Seldon Core works?"),(0,r.kt)("p",null,"Seldon Core\uac00 API \uc11c\ubc84\ub97c \uc0dd\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc694\uc57d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-fields-0.png",src:t(3438).Z,width:"2784",height:"1000"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"initContainer\ub294 \ubaa8\ub378 \uc800\uc7a5\uc18c\uc5d0\uc11c \ud544\uc694\ud55c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 container\ub85c \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"container\ub294 \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \uac10\uc2fc API \uc11c\ubc84\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\uc0dd\uc131\ub41c API \uc11c\ubc84 \uc8fc\uc18c\ub85c API\ub97c \uc694\uccad\ud558\uc5ec \ubaa8\ub378\uc758 \ucd94\ub860 \uac12\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"seldondeployment-spec"},"SeldonDeployment Spec"),(0,r.kt)("p",null,"Seldon Core\ub97c \uc0ac\uc6a9\ud560 \ub54c, \uc8fc\ub85c \uc0ac\uc6a9\ud558\uac8c \ub418\ub294 \ucee4\uc2a4\ud140 \ub9ac\uc18c\uc2a4\uc778 SeldonDeployment\ub97c \uc815\uc758\ud558\ub294 yaml 
\ud30c\uc77c\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,r.kt)("p",null,"SeldonDeployment spe \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"name")," \uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"predictors")," \ud544\ub4dc\ub294 required \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"predictors"),"\ub294 \ud55c \uac1c\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph")," \uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c\ub3c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph"),"\uc5d0\uc11c \uc815\uc758\ud574\uc57c \ud560 \ud544\ub4dc\ub4e4\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \ub294 \ud558\ub098\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \ud0a4\uac12\uc774 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \uc5d0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"initContainers"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"containers")," \uc758 \ud544\ub4dc\uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"volumes"},"volumes"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),"\uc740 initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uae30 \uc704\ud55c \uacf5\uac04\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uc785\ub825\uc744 \ubc1b\uc73c\uba70 array\uc758 \uad6c\uc131 \uc694\uc18c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"emptyDir")," \uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\ub4e4\uc740 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uace0 \uc62e\uae38 \ub54c \ud55c\ubc88 \uc0ac\uc6a9\ub418\ubbc0\ub85c \ud06c\uac8c \uc218\uc815\ud558\uc9c0 \uc54a\uc544\ub3c4 \ub429\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"initcontainer"},"initContainer"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,r.kt)("p",null,"initContainer\ub294 API\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ub418\ub294 \ud544\ub4dc\ub4e4\uc740 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub85c\ubd80\ud130 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ub54c \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \uc815\ud574\uc90d\ub2c8\ub2e4."),(0,r.kt)("p",null,"initContainer\uc758 \uac12\uc740 n\uac1c\uc758 array\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc73c\uba70 \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\ub9c8\ub2e4 \uac01\uac01 \uc9c0\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"name"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub514\ubc84\uae45\uc744 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," \ub85c \uc0ac\uc6a9\ud558\uae38 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 
\ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uae30 \uc704\ud574 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","seldon core\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \uc774\ubbf8\uc9c0\ub294 \ud06c\uac8c \ub450 \uac00\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,r.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,r.kt)("p",null,"\uac01\uac01\uc758 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uc744 \ucc38\uace0 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c\ub294 kfserving\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"args"},"args"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,r.kt)("p",null,"gcr.io/kfserving/storage-initializer:v0.4.0 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uac00 \uc2e4\ud589(",(0,r.kt)("inlineCode",{parentName:"p"},"run"),")\ub420 \ub54c \uc785\ub825\ubc1b\ub294 argument\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uad6c\uc131\ub418\uba70 \uccab \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub450 \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4. 
(seldon core\uc5d0\uc11c\ub294 \uc8fc\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.)"),(0,r.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumneMounts"),"\ub294 volumes\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d \ubcfc\ub968\uc744 \ubd99\uc5ec\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 Volume"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"container"},"container"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"container\ub294 \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 API \ud615\uc2dd\uc73c\ub85c \uc2e4\ud589\ub420 \ub54c\uc758 \uc124\uc815\uc744 \uc815\uc758\ud558\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4. "),(0,r.kt)("h4",{id:"name-1"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\uc758 \uc774\ub984\uc744 \uc801\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image-1"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \ub370 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ubbf8\uc9c0\uc5d0\ub294 \ubaa8\ub378\uc774 \ub85c\ub4dc\ub420 \ub54c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uc774 \ubaa8\ub450 \uc124\uce58\ub418\uc5b4 \uc788\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Seldon Core\uc5d0\uc11c \uc9c0\uc6d0\ud558\ub294 \uacf5\uc2dd \uc774\ubbf8\uc9c0\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,r.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,r.kt)("p",null,"initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ub370\uc774\ud130\uac00 \uc788\ub294 \uacbd\ub85c\ub97c \uc54c\ub824\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub54c \ubaa8\ub378\uc774 \uc218\uc815\ub418\ub294 \uac83\uc744 \ubc29\uc9c0\ud558\uae30 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"readOnly: true"),"\ub3c4 \uac19\uc774 
\uc8fc\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"securitycontext"},"securityContext"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"\ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud560 \ub54c pod\uc774 \uad8c\ud55c\uc774 \uc5c6\uc5b4\uc11c \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc218\ud589\ud558\uc9c0 \ubabb\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574\uc11c root \uad8c\ud55c\uc744 \ubd80\uc5ec\ud569\ub2c8\ub2e4. (\ub2e4\ub9cc \uc774 \uc791\uc5c5\uc740 \uc2e4\uc81c \uc11c\ube59 \uc2dc \ubcf4\uc548 \ubb38\uc81c\uac00 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"graph"},"graph"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,r.kt)("p",null,"\ubaa8\ub378\uc774 \ub3d9\uc791\ud558\ub294 \uc21c\uc11c\ub97c \uc815\uc758\ud55c \ud544\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"name-2"},"name"),(0,r.kt)("p",null,"\ubaa8\ub378 \uadf8\ub798\ud504\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. container\uc5d0\uc11c \uc815\uc758\ub41c \uc774\ub984\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"type"},"type"),(0,r.kt)("p",null,"type\uc740 \ud06c\uac8c 4\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"MODEL"),(0,r.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"ROUTER")),(0,r.kt)("p",null,"\uac01 type\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"parameters"},"parameters"),(0,r.kt)("p",null,"class init \uc5d0\uc11c \uc0ac\uc6a9\ub418\ub294 \uac12\ub4e4\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","sklearnserver\uc5d0\uc11c \ud544\uc694\ud55c \uac12\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"\ub2e4\uc74c \ud30c\uc77c"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,r.kt)("p",null,"\ucf54\ub4dc\ub97c \ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"model_uri"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"method"),"\ub97c \uc815\uc758\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"children"},"children"),(0,r.kt)("p",null,"\uc21c\uc11c\ub3c4\ub97c \uc791\uc131\ud560 \ub54c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. 
\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},3438:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/seldon-fields-0-7794367220b87e1aba920b6aad6f9bf8.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2375],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var p=a.createContext({}),m=function(e){var n=a.useContext(p),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},s=function(e){var n=m(e.components);return a.createElement(p.Provider,{value:n},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=i(e,["components","mdxType","originalType","parentName"]),d=m(t),c=r,k=d["".concat(p,".").concat(c)]||d[c]||u[c]||l;return t?a.createElement(k,o(o({ref:n},s),{},{components:t})):a.createElement(k,o({ref:n},s))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=t.length,o=new Array(l);o[0]=c;var i={};for(var p in n)hasOwnProperty.call(n,p)&&(i[p]=n[p]);i.originalType=e,i[d]="string"==typeof e?e:r,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>m});var a=t(7462),r=(t(7294),t(3905));const l={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/docs/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/docs/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-fields.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/docs/api-deployment/seldon-pg"},next:{title:"5. 
Model from MLflow",permalink:"/docs/api-deployment/seldon-mlflow"}},p={},m=[{value:"How Seldon Core works?",id:"how-seldon-core-works",level:2},{value:"SeldonDeployment Spec",id:"seldondeployment-spec",level:2},{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"initContainer",id:"initcontainer",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],s={toc:m},d="wrapper";function u(e){let{components:n,...l}=e;return(0,r.kt)(d,(0,a.Z)({},s,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"how-seldon-core-works"},"How Seldon Core works?"),(0,r.kt)("p",null,"Seldon Core\uac00 API \uc11c\ubc84\ub97c \uc0dd\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc694\uc57d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-fields-0.png",src:t(3438).Z,width:"2784",height:"1000"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"initContainer\ub294 \ubaa8\ub378 \uc800\uc7a5\uc18c\uc5d0\uc11c \ud544\uc694\ud55c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 container\ub85c \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"container\ub294 \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \uac10\uc2fc API \uc11c\ubc84\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\uc0dd\uc131\ub41c API \uc11c\ubc84 \uc8fc\uc18c\ub85c API\ub97c \uc694\uccad\ud558\uc5ec \ubaa8\ub378\uc758 \ucd94\ub860 \uac12\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"seldondeployment-spec"},"SeldonDeployment Spec"),(0,r.kt)("p",null,"Seldon Core\ub97c \uc0ac\uc6a9\ud560 \ub54c, \uc8fc\ub85c \uc0ac\uc6a9\ud558\uac8c \ub418\ub294 \ucee4\uc2a4\ud140 \ub9ac\uc18c\uc2a4\uc778 SeldonDeployment\ub97c \uc815\uc758\ud558\ub294 yaml \ud30c\uc77c\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,r.kt)("p",null,"SeldonDeployment spe \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"name")," \uacfc 
",(0,r.kt)("inlineCode",{parentName:"p"},"predictors")," \ud544\ub4dc\ub294 required \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"predictors"),"\ub294 \ud55c \uac1c\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph")," \uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c\ub3c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. "),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph"),"\uc5d0\uc11c \uc815\uc758\ud574\uc57c \ud560 \ud544\ub4dc\ub4e4\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \ub294 \ud558\ub098\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \ud0a4\uac12\uc774 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \uc5d0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"initContainers"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"containers")," \uc758 \ud544\ub4dc\uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"volumes"},"volumes"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),"\uc740 initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uae30 \uc704\ud55c \uacf5\uac04\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uc785\ub825\uc744 \ubc1b\uc73c\uba70 array\uc758 \uad6c\uc131 \uc694\uc18c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"emptyDir")," \uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\ub4e4\uc740 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uace0 \uc62e\uae38 \ub54c \ud55c\ubc88 \uc0ac\uc6a9\ub418\ubbc0\ub85c \ud06c\uac8c \uc218\uc815\ud558\uc9c0 \uc54a\uc544\ub3c4 \ub429\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"initcontainer"},"initContainer"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,r.kt)("p",null,"initContainer\ub294 API\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \uc5ed\ud560\uc744 
\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ub418\ub294 \ud544\ub4dc\ub4e4\uc740 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub85c\ubd80\ud130 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ub54c \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \uc815\ud574\uc90d\ub2c8\ub2e4."),(0,r.kt)("p",null,"initContainer\uc758 \uac12\uc740 n\uac1c\uc758 array\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc73c\uba70 \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\ub9c8\ub2e4 \uac01\uac01 \uc9c0\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"name"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub514\ubc84\uae45\uc744 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," \ub85c \uc0ac\uc6a9\ud558\uae38 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uae30 \uc704\ud574 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","seldon core\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \uc774\ubbf8\uc9c0\ub294 \ud06c\uac8c \ub450 \uac00\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,r.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,r.kt)("p",null,"\uac01\uac01\uc758 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uc744 \ucc38\uace0 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c\ub294 kfserving\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"args"},"args"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,r.kt)("p",null,"gcr.io/kfserving/storage-initializer:v0.4.0 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uac00 \uc2e4\ud589(",(0,r.kt)("inlineCode",{parentName:"p"},"run"),")\ub420 \ub54c \uc785\ub825\ubc1b\ub294 argument\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uad6c\uc131\ub418\uba70 \uccab \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub450 \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4. 
(seldon core\uc5d0\uc11c\ub294 \uc8fc\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.)"),(0,r.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumneMounts"),"\ub294 volumes\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d \ubcfc\ub968\uc744 \ubd99\uc5ec\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 Volume"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"container"},"container"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"container\ub294 \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 API \ud615\uc2dd\uc73c\ub85c \uc2e4\ud589\ub420 \ub54c\uc758 \uc124\uc815\uc744 \uc815\uc758\ud558\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4. "),(0,r.kt)("h4",{id:"name-1"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\uc758 \uc774\ub984\uc744 \uc801\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image-1"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \ub370 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ubbf8\uc9c0\uc5d0\ub294 \ubaa8\ub378\uc774 \ub85c\ub4dc\ub420 \ub54c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uc774 \ubaa8\ub450 \uc124\uce58\ub418\uc5b4 \uc788\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Seldon Core\uc5d0\uc11c \uc9c0\uc6d0\ud558\ub294 \uacf5\uc2dd \uc774\ubbf8\uc9c0\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,r.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,r.kt)("p",null,"initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ub370\uc774\ud130\uac00 \uc788\ub294 \uacbd\ub85c\ub97c \uc54c\ub824\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub54c \ubaa8\ub378\uc774 \uc218\uc815\ub418\ub294 \uac83\uc744 \ubc29\uc9c0\ud558\uae30 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"readOnly: true"),"\ub3c4 \uac19\uc774 
\uc8fc\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"securitycontext"},"securityContext"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"\ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud560 \ub54c pod\uc774 \uad8c\ud55c\uc774 \uc5c6\uc5b4\uc11c \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc218\ud589\ud558\uc9c0 \ubabb\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574\uc11c root \uad8c\ud55c\uc744 \ubd80\uc5ec\ud569\ub2c8\ub2e4. (\ub2e4\ub9cc \uc774 \uc791\uc5c5\uc740 \uc2e4\uc81c \uc11c\ube59 \uc2dc \ubcf4\uc548 \ubb38\uc81c\uac00 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"graph"},"graph"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,r.kt)("p",null,"\ubaa8\ub378\uc774 \ub3d9\uc791\ud558\ub294 \uc21c\uc11c\ub97c \uc815\uc758\ud55c \ud544\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"name-2"},"name"),(0,r.kt)("p",null,"\ubaa8\ub378 \uadf8\ub798\ud504\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. container\uc5d0\uc11c \uc815\uc758\ub41c \uc774\ub984\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"type"},"type"),(0,r.kt)("p",null,"type\uc740 \ud06c\uac8c 4\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"MODEL"),(0,r.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"ROUTER")),(0,r.kt)("p",null,"\uac01 type\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"parameters"},"parameters"),(0,r.kt)("p",null,"class init \uc5d0\uc11c \uc0ac\uc6a9\ub418\ub294 \uac12\ub4e4\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","sklearnserver\uc5d0\uc11c \ud544\uc694\ud55c \uac12\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"\ub2e4\uc74c \ud30c\uc77c"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,r.kt)("p",null,"\ucf54\ub4dc\ub97c \ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"model_uri"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"method"),"\ub97c \uc815\uc758\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"children"},"children"),(0,r.kt)("p",null,"\uc21c\uc11c\ub3c4\ub97c \uc791\uc131\ud560 \ub54c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. 
\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},3438:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/seldon-fields-0-7794367220b87e1aba920b6aad6f9bf8.png"}}]); \ No newline at end of file diff --git a/assets/js/5dc48d01.db2a5c35.js b/assets/js/5dc48d01.a92c1cb6.js similarity index 98% rename from assets/js/5dc48d01.db2a5c35.js rename to assets/js/5dc48d01.a92c1cb6.js index 9c1feef7..e22ff618 100644 --- a/assets/js/5dc48d01.db2a5c35.js +++ b/assets/js/5dc48d01.a92c1cb6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2610],{3905:(e,t,r)=>{r.d(t,{Zo:()=>l,kt:()=>m});var n=r(7294);function u(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function s(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(u[r]=e[r]);return u}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(u[r]=e[r])}return u}var p=n.createContext({}),i=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},l=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",b={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,u=e.mdxType,s=e.originalType,p=e.parentName,l=a(e,["components","mdxType","originalType","parentName"]),c=i(r),k=u,m=c["".concat(p,".").concat(k)]||c[k]||b[k]||s;return r?n.createElement(m,o(o({ref:t},l),{},{components:r})):n.createElement(m,o({ref:t},l))}));function m(e,t){var r=arguments,u=t&&t.mdxType;if("string"==typeof e||u){var s=r.length,o=new Array(s);o[0]=k;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[c]="string"==typeof e?e:u,o[1]=a;for(var i=2;i{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>b,frontMatter:()=>s,metadata:()=>a,toc:()=>i});var n=r(7462),u=(r(7294),r(3905));const s={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,a={unversionedId:"setup-kubernetes/kubernetes",id:"setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/docs/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/docs/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/docs/setup-kubernetes/intro"},next:{title:"3. 
Install Prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite"}},p={},i=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],l={toc:i},c="wrapper";function b(e){let{components:t,...r}=e;return(0,u.kt)(c,(0,n.Z)({},l,r,{components:t,mdxType:"MDXLayout"}),(0,u.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,u.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \ucc98\uc74c \ubc30\uc6b0\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c \uccab \uc9c4\uc785 \uc7a5\ubcbd\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc2e4\uc2b5 \ud658\uacbd\uc744 \uad6c\ucd95\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,u.kt)("p",null,"\ud504\ub85c\ub355\uc158 \ub808\ubca8\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \uc218 \uc788\uac8c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\ub294 \ub3c4\uad6c\ub294 kubeadm \uc774\uc9c0\ub9cc, \uc0ac\uc6a9\uc790\ub4e4\uc774 \uc870\uae08 \ub354 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 kubespray, kops \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud558\uba70, \ud559\uc2b5 \ubaa9\uc801\uc744 \uc704\ud574\uc11c \ucef4\ud329\ud2b8\ud55c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc815\ub9d0 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 k3s, minikube, microk8s, kind \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,u.kt)("p",null,"\uac01\uac01\uc758 \ub3c4\uad6c\ub294 \uc7a5\ub2e8\uc810\uc774 \ub2e4\ub974\uae30\uc5d0 \uc0ac\uc6a9\uc790\ub9c8\ub2e4 \uc120\ud638\ud558\ub294 \ub3c4\uad6c\uac00 \ub2e4\ub978 \uc810\uc744 \uace0\ub824\ud558\uc5ec, \ubcf8 \uae00\uc5d0\uc11c\ub294 kubeadm, k3s, minikube\uc758 3\uac00\uc9c0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.\n\uac01 \ub3c4\uad6c\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ube44\uad50\ub294 \ub2e4\uc74c \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,u.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"\uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,u.kt)("p",null,(0,u.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \ud234\uc740 ",(0,u.kt)("strong",{parentName:"p"},"k3s"),"\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c \uc27d\uac8c \ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.",(0,u.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \ub178\ub4dc \uad6c\uc131\uae4c\uc9c0 \ud65c\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 ",(0,u.kt)("strong",{parentName:"p"},"kubeadm"),"\uc744 \uad8c\uc7a5\ud574 \ub4dc\ub9bd\ub2c8\ub2e4.",(0,u.kt)("br",{parentName:"p"}),"\n",(0,u.kt)("strong",{parentName:"p"},"minikube")," \ub294 \uc800\ud76c\uac00 \uc124\uba85\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \uc678\uc5d0\ub3c4 \ub2e4\ub978 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c add-on \ud615\uc2dd\uc73c\ub85c \uc27d\uac8c \uc124\uce58\ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("p",null,"\ubcf8 ",(0,u.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uad6c\ucd95\ud558\uac8c \ub420 MLOps \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 \uc6d0\ud65c\ud788 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, 
\uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c, \ucd94\uac00\ub85c \uc124\uc815\ud574 \uc8fc\uc5b4\uc57c \ud558\ub294 \ubd80\ubd84\uc774 \ucd94\uac00\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("p",null,"Ubuntu OS\uae4c\uc9c0\ub294 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud0d1\uc744 k8s cluster\ub85c \uad6c\ucd95\ud55c \ub4a4, \uc678\ubd80 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud558\ub294 \uac83\uae4c\uc9c0\uac00 \ubcf8 ",(0,u.kt)("strong",{parentName:"p"},"Setup Kubernetes"),"\ub2e8\uc6d0\uc758 \ubc94\uc704\uc785\ub2c8\ub2e4."),(0,u.kt)("p",null,"\uc790\uc138\ud55c \uad6c\ucd95 \ubc29\ubc95\uc740 3\uac00\uc9c0 \ub3c4\uad6c\ub9c8\ub2e4 \ub2e4\ub974\uae30\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc740 \ud750\ub984\uc73c\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("pre",null,(0,u.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,u.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \ubc18\ub4dc\uc2dc \ubaa8\ub4e0 \ub3c4\uad6c\ub97c \uc0ac\uc6a9\ud574 \ubcfc \ud544\uc694\ub294 \uc5c6\uc73c\uba70, \uc774 \uc911 \uc5ec\ub7ec\ubd84\uc774 \uc775\uc219\ud558\uc2e0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc8fc\uc2dc\uba74 \ucda9\ubd84\ud569\ub2c8\ub2e4."))}b.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2610],{3905:(e,t,r)=>{r.d(t,{Zo:()=>l,kt:()=>m});var n=r(7294);function u(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function s(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(u[r]=e[r]);return u}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(u[r]=e[r])}return u}var p=n.createContext({}),i=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},l=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",b={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,u=e.mdxType,s=e.originalType,p=e.parentName,l=a(e,["components","mdxType","originalType","parentName"]),c=i(r),k=u,m=c["".concat(p,".").concat(k)]||c[k]||b[k]||s;return r?n.createElement(m,o(o({ref:t},l),{},{components:r})):n.createElement(m,o({ref:t},l))}));function m(e,t){var r=arguments,u=t&&t.mdxType;if("string"==typeof e||u){var s=r.length,o=new Array(s);o[0]=k;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[c]="string"==typeof e?e:u,o[1]=a;for(var i=2;i{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>b,frontMatter:()=>s,metadata:()=>a,toc:()=>i});var n=r(7462),u=(r(7294),r(3905));const s={title:"2. 
Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,a={unversionedId:"setup-kubernetes/kubernetes",id:"setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/docs/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/docs/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/docs/setup-kubernetes/intro"},next:{title:"3. Install Prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite"}},p={},i=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],l={toc:i},c="wrapper";function b(e){let{components:t,...r}=e;return(0,u.kt)(c,(0,n.Z)({},l,r,{components:t,mdxType:"MDXLayout"}),(0,u.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,u.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \ucc98\uc74c \ubc30\uc6b0\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c \uccab \uc9c4\uc785 \uc7a5\ubcbd\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc2e4\uc2b5 \ud658\uacbd\uc744 \uad6c\ucd95\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,u.kt)("p",null,"\ud504\ub85c\ub355\uc158 \ub808\ubca8\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \uc218 \uc788\uac8c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\ub294 \ub3c4\uad6c\ub294 kubeadm \uc774\uc9c0\ub9cc, \uc0ac\uc6a9\uc790\ub4e4\uc774 \uc870\uae08 \ub354 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 kubespray, kops \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud558\uba70, \ud559\uc2b5 \ubaa9\uc801\uc744 \uc704\ud574\uc11c \ucef4\ud329\ud2b8\ud55c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc815\ub9d0 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 k3s, minikube, microk8s, kind \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,u.kt)("p",null,"\uac01\uac01\uc758 \ub3c4\uad6c\ub294 \uc7a5\ub2e8\uc810\uc774 \ub2e4\ub974\uae30\uc5d0 \uc0ac\uc6a9\uc790\ub9c8\ub2e4 \uc120\ud638\ud558\ub294 \ub3c4\uad6c\uac00 \ub2e4\ub978 \uc810\uc744 \uace0\ub824\ud558\uc5ec, \ubcf8 \uae00\uc5d0\uc11c\ub294 kubeadm, k3s, minikube\uc758 3\uac00\uc9c0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.\n\uac01 \ub3c4\uad6c\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ube44\uad50\ub294 \ub2e4\uc74c \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,u.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"\uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,u.kt)("p",null,(0,u.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \ud234\uc740 ",(0,u.kt)("strong",{parentName:"p"},"k3s"),"\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 
\ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c \uc27d\uac8c \ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.",(0,u.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \ub178\ub4dc \uad6c\uc131\uae4c\uc9c0 \ud65c\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 ",(0,u.kt)("strong",{parentName:"p"},"kubeadm"),"\uc744 \uad8c\uc7a5\ud574 \ub4dc\ub9bd\ub2c8\ub2e4.",(0,u.kt)("br",{parentName:"p"}),"\n",(0,u.kt)("strong",{parentName:"p"},"minikube")," \ub294 \uc800\ud76c\uac00 \uc124\uba85\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \uc678\uc5d0\ub3c4 \ub2e4\ub978 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c add-on \ud615\uc2dd\uc73c\ub85c \uc27d\uac8c \uc124\uce58\ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("p",null,"\ubcf8 ",(0,u.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uad6c\ucd95\ud558\uac8c \ub420 MLOps \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 \uc6d0\ud65c\ud788 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c, \ucd94\uac00\ub85c \uc124\uc815\ud574 \uc8fc\uc5b4\uc57c \ud558\ub294 \ubd80\ubd84\uc774 \ucd94\uac00\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("p",null,"Ubuntu OS\uae4c\uc9c0\ub294 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud0d1\uc744 k8s cluster\ub85c \uad6c\ucd95\ud55c \ub4a4, \uc678\ubd80 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud558\ub294 \uac83\uae4c\uc9c0\uac00 \ubcf8 ",(0,u.kt)("strong",{parentName:"p"},"Setup Kubernetes"),"\ub2e8\uc6d0\uc758 \ubc94\uc704\uc785\ub2c8\ub2e4."),(0,u.kt)("p",null,"\uc790\uc138\ud55c \uad6c\ucd95 \ubc29\ubc95\uc740 3\uac00\uc9c0 \ub3c4\uad6c\ub9c8\ub2e4 \ub2e4\ub974\uae30\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc740 \ud750\ub984\uc73c\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,u.kt)("pre",null,(0,u.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,u.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. 
\ubc18\ub4dc\uc2dc \ubaa8\ub4e0 \ub3c4\uad6c\ub97c \uc0ac\uc6a9\ud574 \ubcfc \ud544\uc694\ub294 \uc5c6\uc73c\uba70, \uc774 \uc911 \uc5ec\ub7ec\ubd84\uc774 \uc775\uc219\ud558\uc2e0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc8fc\uc2dc\uba74 \ucda9\ubd84\ud569\ub2c8\ub2e4."))}b.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6016bee0.43321011.js b/assets/js/6016bee0.0e9c8c76.js similarity index 97% rename from assets/js/6016bee0.43321011.js rename to assets/js/6016bee0.0e9c8c76.js index bb42630b..1a822064 100644 --- a/assets/js/6016bee0.43321011.js +++ b/assets/js/6016bee0.0e9c8c76.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2349],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=s(n),m=o,f=c["".concat(p,".").concat(m)]||c[m]||u[m]||a;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=m;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:o,l[1]=i;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>u,frontMatter:()=>a,metadata:()=>i,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-pg",id:"version-1.0/api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/docs/1.0/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. 
Deploy SeldonDeployment",permalink:"/docs/1.0/api-deployment/seldon-iris"},next:{title:"4. Seldon Fields",permalink:"/docs/1.0/api-deployment/seldon-fields"}},p={},s=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"\ub300\uc2dc\ubcf4\ub4dc",id:"\ub300\uc2dc\ubcf4\ub4dc",level:3},{value:"API \uc694\uccad",id:"api-\uc694\uccad",level:3}],d={toc:s},c="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,r.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,o.kt)("p",null,"\uc774\uc81c, ",(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-iris"},"\uc9c0\ub09c \ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc0dd\uc131\ud588\ub358 SeldonDeployment \ub85c API Request \ub97c \ubc18\ubcf5\uc801\uc73c\ub85c \uc218\ud589\ud574\ubcf4\uace0, \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \ubcc0\ud654\uac00 \uc77c\uc5b4\ub098\ub294\uc9c0 \ud655\uc778\ud574\ubd05\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"\ub300\uc2dc\ubcf4\ub4dc"},"\ub300\uc2dc\ubcf4\ub4dc"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-pg"},"\uc55e\uc11c \uc0dd\uc131\ud55c \ub300\uc2dc\ubcf4\ub4dc"),"\ub97c \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("h3",{id:"api-\uc694\uccad"},"API \uc694\uccad"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-iris#using-cli"},"\uc55e\uc11c \uc0dd\uc131\ud55c Seldon Deployment"),"\uc5d0 \uc694\uccad\uc744 ",(0,o.kt)("strong",{parentName:"p"},"\ubc18\ubcf5\ud574\uc11c")," \ubcf4\ub0c5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,o.kt)("p",null,"\uadf8\ub9ac\uace0 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 Global Request Rate \uc774 ",(0,o.kt)("inlineCode",{parentName:"p"},"0 ops")," \uc5d0\uc11c \uc21c\uac04\uc801\uc73c\ub85c \uc0c1\uc2b9\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"repeat-raise.png",src:n(147).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"\uc774\ub807\uac8c \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0},147:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2349],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=s(n),m=o,f=c["".concat(p,".").concat(m)]||c[m]||u[m]||a;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=m;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:o,l[1]=i;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>u,frontMatter:()=>a,metadata:()=>i,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-pg",id:"version-1.0/api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/docs/1.0/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/docs/1.0/api-deployment/seldon-iris"},next:{title:"4. 
Seldon Fields",permalink:"/docs/1.0/api-deployment/seldon-fields"}},p={},s=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"\ub300\uc2dc\ubcf4\ub4dc",id:"\ub300\uc2dc\ubcf4\ub4dc",level:3},{value:"API \uc694\uccad",id:"api-\uc694\uccad",level:3}],d={toc:s},c="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,r.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,o.kt)("p",null,"\uc774\uc81c, ",(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-iris"},"\uc9c0\ub09c \ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc0dd\uc131\ud588\ub358 SeldonDeployment \ub85c API Request \ub97c \ubc18\ubcf5\uc801\uc73c\ub85c \uc218\ud589\ud574\ubcf4\uace0, \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \ubcc0\ud654\uac00 \uc77c\uc5b4\ub098\ub294\uc9c0 \ud655\uc778\ud574\ubd05\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"\ub300\uc2dc\ubcf4\ub4dc"},"\ub300\uc2dc\ubcf4\ub4dc"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-pg"},"\uc55e\uc11c \uc0dd\uc131\ud55c \ub300\uc2dc\ubcf4\ub4dc"),"\ub97c \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("h3",{id:"api-\uc694\uccad"},"API \uc694\uccad"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-iris#using-cli"},"\uc55e\uc11c \uc0dd\uc131\ud55c Seldon Deployment"),"\uc5d0 \uc694\uccad\uc744 ",(0,o.kt)("strong",{parentName:"p"},"\ubc18\ubcf5\ud574\uc11c")," \ubcf4\ub0c5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,o.kt)("p",null,"\uadf8\ub9ac\uace0 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 Global Request Rate \uc774 ",(0,o.kt)("inlineCode",{parentName:"p"},"0 ops")," \uc5d0\uc11c \uc21c\uac04\uc801\uc73c\ub85c \uc0c1\uc2b9\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"repeat-raise.png",src:n(147).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"\uc774\ub807\uac8c \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0},147:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file diff --git a/assets/js/6246222d.56e22807.js b/assets/js/6246222d.f026b1ff.js similarity index 98% rename from assets/js/6246222d.56e22807.js rename to assets/js/6246222d.f026b1ff.js index 57e5f12c..121aecdb 100644 --- a/assets/js/6246222d.56e22807.js +++ b/assets/js/6246222d.f026b1ff.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[728],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return 
Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||s;return t?a.createElement(b,r(r({ref:n},d),{},{components:t})):a.createElement(b,r({ref:n},d))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,r[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"version-1.0/setup-components/install-components-seldon",title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/versioned_docs/version-1.0/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/docs/1.0/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-seldon.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/docs/1.0/setup-components/install-components-mlflow"},next:{title:"4. 
Prometheus & Grafana",permalink:"/docs/1.0/setup-components/install-components-pg"}},i={},p=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Selon-Core \uc124\uce58",id:"selon-core-\uc124\uce58",level:2},{value:"Ambassador - Helm Repository \ucd94\uac00",id:"ambassador---helm-repository-\ucd94\uac00",level:3},{value:"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],d={toc:p},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub808\uc784\uc6cc\ud06c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 Seldon-Core \uc758 \uacf5\uc2dd ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"\uc81c\ud488 \uc124\uba85 \ud398\uc774\uc9c0")," \uc640 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"\uae43\ud5d9")," \uadf8\ub9ac\uace0 API Deployment \ud30c\ud2b8\ub97c \ucc38\uace0\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"selon-core-\uc124\uce58"},"Selon-Core \uc124\uce58"),(0,o.kt)("p",null,"Seldon-Core\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc778\uadf8\ub808\uc2a4(Ingress)\ub97c \ub2f4\ub2f9\ud558\ub294 Ambassador \uc640 Istio \uc640 \uac19\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"\ubaa8\ub4c8\uc774 \ud544\uc694\ud569\ub2c8\ub2e4"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core \uc5d0\uc11c\ub294 Ambassador \uc640 Istio \ub9cc\uc744 \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uba70, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Ambassador\ub97c \uc0ac\uc6a9\ud574 Seldon-core\ub97c \uc0ac\uc6a9\ud558\ubbc0\ub85c Ambassador\ub97c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"ambassador---helm-repository-\ucd94\uac00"},"Ambassador - Helm Repository \ucd94\uac00"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"ambassador Chart 6.9.3 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"seldon-system \uc5d0 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"seldon-core-operator Chart 1.11.2 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 
\ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"seldon-system namespace \uc5d0 1 \uac1c\uc758 seldon-controller-manager pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[728],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||s;return t?a.createElement(b,r(r({ref:n},d),{},{components:t})):a.createElement(b,r({ref:n},d))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,r[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>p});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"version-1.0/setup-components/install-components-seldon",title:"3. 
Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/versioned_docs/version-1.0/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/docs/1.0/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-seldon.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/docs/1.0/setup-components/install-components-mlflow"},next:{title:"4. Prometheus & Grafana",permalink:"/docs/1.0/setup-components/install-components-pg"}},i={},p=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Selon-Core \uc124\uce58",id:"selon-core-\uc124\uce58",level:2},{value:"Ambassador - Helm Repository \ucd94\uac00",id:"ambassador---helm-repository-\ucd94\uac00",level:3},{value:"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],d={toc:p},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub808\uc784\uc6cc\ud06c \uc911 \ud558\ub098\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 Seldon-Core \uc758 \uacf5\uc2dd ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"\uc81c\ud488 \uc124\uba85 \ud398\uc774\uc9c0")," \uc640 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"\uae43\ud5d9")," \uadf8\ub9ac\uace0 API Deployment \ud30c\ud2b8\ub97c \ucc38\uace0\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"selon-core-\uc124\uce58"},"Selon-Core \uc124\uce58"),(0,o.kt)("p",null,"Seldon-Core\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc778\uadf8\ub808\uc2a4(Ingress)\ub97c \ub2f4\ub2f9\ud558\ub294 Ambassador \uc640 Istio \uc640 \uac19\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"\ubaa8\ub4c8\uc774 \ud544\uc694\ud569\ub2c8\ub2e4"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core \uc5d0\uc11c\ub294 Ambassador \uc640 Istio \ub9cc\uc744 \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uba70, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Ambassador\ub97c \uc0ac\uc6a9\ud574 Seldon-core\ub97c \uc0ac\uc6a9\ud558\ubbc0\ub85c Ambassador\ub97c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"ambassador---helm-repository-\ucd94\uac00"},"Ambassador - Helm 
Repository \ucd94\uac00"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"ambassador---helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Ambassador - Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"ambassador Chart 6.9.3 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! 
You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"seldon-system \uc5d0 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"seldon-core-operator Chart 1.11.2 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"seldon-system namespace \uc5d0 1 \uac1c\uc758 seldon-controller-manager pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/68f81397.d55909e0.js b/assets/js/68f81397.c0222f1d.js similarity index 97% rename from assets/js/68f81397.d55909e0.js rename to assets/js/68f81397.c0222f1d.js index 9af313d5..5c5f9ec3 100644 --- a/assets/js/68f81397.d55909e0.js +++ 
b/assets/js/68f81397.c0222f1d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2603],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(r),m=o,f=c["".concat(l,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"kubeflow-dashboard-guide/experiments",title:"5. Experiments(AutoML)",description:"",source:"@site/docs/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/docs/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes"},next:{title:"6. 
Kubeflow Pipeline \uad00\ub828",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others"}},l={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(AutoML)\uc744 \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(7274).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"Experiments(AutoML) \ud398\uc774\uc9c0\ub294 Kubeflow\uc5d0\uc11c Hyperparameter Tuning\uacfc Neural Architecture Search\ub97c \ud1b5\ud55c AutoML\uc744 \ub2f4\ub2f9\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),"\ub97c \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"Katib\uc640 Experiments(AutoML)\uc5d0 \ub300\ud55c \uc0ac\uc6a9\ubc95\uc740 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," v1.0\uc5d0\uc11c\ub294 \ub2e4\ub8e8\uc9c0 \uc54a\uc73c\uba70, v2.0\uc5d0 \ucd94\uac00\ub420 \uc608\uc815\uc785\ub2c8\ub2e4."))}d.isMDXComponent=!0},7274:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2603],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(r),m=o,f=c["".concat(l,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"kubeflow-dashboard-guide/experiments",title:"5. 
Experiments(AutoML)",description:"",source:"@site/docs/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/docs/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes"},next:{title:"6. Kubeflow Pipeline \uad00\ub828",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others"}},l={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(AutoML)\uc744 \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(7274).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"Experiments(AutoML) \ud398\uc774\uc9c0\ub294 Kubeflow\uc5d0\uc11c Hyperparameter Tuning\uacfc Neural Architecture Search\ub97c \ud1b5\ud55c AutoML\uc744 \ub2f4\ub2f9\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),"\ub97c \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"Katib\uc640 Experiments(AutoML)\uc5d0 \ub300\ud55c \uc0ac\uc6a9\ubc95\uc740 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," v1.0\uc5d0\uc11c\ub294 \ub2e4\ub8e8\uc9c0 \uc54a\uc73c\uba70, v2.0\uc5d0 \ucd94\uac00\ub420 \uc608\uc815\uc785\ub2c8\ub2e4."))}d.isMDXComponent=!0},7274:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/6b57b422.056e4749.js b/assets/js/6b57b422.d50d1f5b.js similarity index 99% rename from assets/js/6b57b422.056e4749.js rename to assets/js/6b57b422.d50d1f5b.js index 172b9403..c4fe084f 100644 --- a/assets/js/6b57b422.056e4749.js +++ b/assets/js/6b57b422.d50d1f5b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8449],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>h});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function o(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=a.createContext({}),i=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=i(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var 
t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=i(n),m=r,h=d["".concat(p,".").concat(m)]||d[m]||u[m]||l;return n?a.createElement(h,o(o({ref:t},c),{},{components:n})):a.createElement(h,o({ref:t},c))}));function h(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,o=new Array(l);o[0]=m;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[d]="string"==typeof e?e:r,o[1]=s;for(var i=2;i{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>s,toc:()=>i});var a=n(7462),r=(n(7294),n(3905));const l={title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-pg",id:"setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/docs/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/docs/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/docs/setup-components/install-components-seldon"},next:{title:"1. 
Central Dashboard",permalink:"/docs/kubeflow-dashboard-guide/intro"}},p={},i=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3},{value:"References",id:"references",level:2}],c={toc:i},d="wrapper";function u(e){let{components:t,...l}=e;return(0,r.kt)(d,(0,a.Z)({},c,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4(Prometheus) \uc640 \uadf8\ub77c\ud30c\ub098(Grafana) \ub294 \ubaa8\ub2c8\ud130\ub9c1\uc744 \uc704\ud55c \ub3c4\uad6c\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc548\uc815\uc801\uc778 \uc11c\ube44\uc2a4 \uc6b4\uc601\uc744 \uc704\ud574\uc11c\ub294 \uc11c\ube44\uc2a4\uc640 \uc11c\ube44\uc2a4\uac00 \uc6b4\uc601\ub418\uace0 \uc788\ub294 \uc778\ud504\ub77c\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ucc30\ud558\uace0, \uad00\ucc30\ud55c \uba54\ud2b8\ub9ad\uc744 \ubc14\ud0d5\uc73c\ub85c \ubb38\uc81c\uac00 \uc0dd\uae38 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ubaa8\ub2c8\ud130\ub9c1\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud558\uae30 \uc704\ud55c \ub9ce\uc740 \ub3c4\uad6c \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc624\ud508\uc18c\uc2a4\uc778 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus \uacf5\uc2dd \ubb38\uc11c"),", ",(0,r.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\ub294 \ub2e4\uc591\ud55c \ub300\uc0c1\uc73c\ub85c\ubd80\ud130 Metric\uc744 \uc218\uc9d1\ud558\ub294 \ub3c4\uad6c\uc774\uba70, \uadf8\ub77c\ud30c\ub098\ub294 \ubaa8\uc778 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
\uc11c\ub85c \uac04\uc758 \uc885\uc18d\uc131\uc740 \uc5c6\uc9c0\ub9cc \uc0c1\ud638 \ubcf4\uc644\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc5b4 \ud568\uaed8 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud55c \ub4a4, Seldon-Core \ub85c \uc0dd\uc131\ud55c SeldonDeployment \ub85c API \uc694\uccad\uc744 \ubcf4\ub0b4, \uc815\uc0c1\uc801\uc73c\ub85c Metrics \uc774 \uc218\uc9d1\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ubcf8 \uae00\uc5d0\uc11c\ub294 seldonio/seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud558\uace0, Seldon-Core \uc5d0\uc11c \uc0dd\uc131\ud55c SeldonDeployment\uc758 Metrics \uc744 \ud6a8\uc728\uc801\uc73c\ub85c \ud655\uc778\ud558\uae30 \uc704\ud55c \ub300\uc2dc\ubcf4\ub4dc\ub3c4 \ud568\uaed8 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,r.kt)("p",null,"seldon-system namespace \uc5d0 6\uac1c\uc758 seldon-core-analytics \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uadf8\ub77c\ud30c\ub098\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-install",src:n(1030).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d\uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Email or username : ",(0,r.kt)("inlineCode",{parentName:"li"},"admin")),(0,r.kt)("li",{parentName:"ul"},"Password : ",(0,r.kt)("inlineCode",{parentName:"li"},"password"))),(0,r.kt)("p",null,"\ub85c\uadf8\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-login",src:n(8234).Z,width:"3640",height:"2140"})),(0,r.kt)("p",null,"\uc88c\uce21\uc758 \ub300\uc2dc\ubcf4\ub4dc \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Manage")," 
\ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard-click",src:n(1502).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\uae30\ubcf8\uc801\uc778 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\uac00 \ud3ec\ud568\ub418\uc5b4\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard",src:n(4231).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"Seldon Core API Dashboard \uac00 \ubcf4\uc774\uace0, \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-dashboard",src:n(9803).Z,width:"5016",height:"2826"})),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},1502:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},4231:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},1030:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},8234:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},9803:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8449],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>h});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function o(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=a.createContext({}),i=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},c=function(e){var t=i(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=i(n),m=r,h=d["".concat(p,".").concat(m)]||d[m]||u[m]||l;return n?a.createElement(h,o(o({ref:t},c),{},{components:n})):a.createElement(h,o({ref:t},c))}));function h(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,o=new Array(l);o[0]=m;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[d]="string"==typeof e?e:r,o[1]=s;for(var i=2;i{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>s,toc:()=>i});var a=n(7462),r=(n(7294),n(3905));const l={title:"4. 
Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-pg",id:"setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/docs/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/docs/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/docs/setup-components/install-components-seldon"},next:{title:"1. Central Dashboard",permalink:"/docs/kubeflow-dashboard-guide/intro"}},p={},i=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3},{value:"References",id:"references",level:2}],c={toc:i},d="wrapper";function u(e){let{components:t,...l}=e;return(0,r.kt)(d,(0,a.Z)({},c,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4(Prometheus) \uc640 \uadf8\ub77c\ud30c\ub098(Grafana) \ub294 \ubaa8\ub2c8\ud130\ub9c1\uc744 \uc704\ud55c \ub3c4\uad6c\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc548\uc815\uc801\uc778 \uc11c\ube44\uc2a4 \uc6b4\uc601\uc744 \uc704\ud574\uc11c\ub294 \uc11c\ube44\uc2a4\uc640 \uc11c\ube44\uc2a4\uac00 \uc6b4\uc601\ub418\uace0 \uc788\ub294 \uc778\ud504\ub77c\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ucc30\ud558\uace0, \uad00\ucc30\ud55c \uba54\ud2b8\ub9ad\uc744 \ubc14\ud0d5\uc73c\ub85c \ubb38\uc81c\uac00 \uc0dd\uae38 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ubaa8\ub2c8\ud130\ub9c1\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud558\uae30 \uc704\ud55c \ub9ce\uc740 \ub3c4\uad6c \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc624\ud508\uc18c\uc2a4\uc778 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus \uacf5\uc2dd \ubb38\uc11c"),", ",(0,r.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\ub294 \ub2e4\uc591\ud55c \ub300\uc0c1\uc73c\ub85c\ubd80\ud130 Metric\uc744 \uc218\uc9d1\ud558\ub294 \ub3c4\uad6c\uc774\uba70, \uadf8\ub77c\ud30c\ub098\ub294 \ubaa8\uc778 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc11c\ub85c \uac04\uc758 \uc885\uc18d\uc131\uc740 \uc5c6\uc9c0\ub9cc \uc0c1\ud638 \ubcf4\uc644\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc5b4 \ud568\uaed8 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud55c \ub4a4, Seldon-Core \ub85c \uc0dd\uc131\ud55c SeldonDeployment \ub85c API \uc694\uccad\uc744 \ubcf4\ub0b4, \uc815\uc0c1\uc801\uc73c\ub85c Metrics \uc774 \uc218\uc9d1\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ubcf8 \uae00\uc5d0\uc11c\ub294 seldonio/seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud558\uace0, Seldon-Core \uc5d0\uc11c \uc0dd\uc131\ud55c SeldonDeployment\uc758 Metrics \uc744 \ud6a8\uc728\uc801\uc73c\ub85c \ud655\uc778\ud558\uae30 \uc704\ud55c \ub300\uc2dc\ubcf4\ub4dc\ub3c4 \ud568\uaed8 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,r.kt)("p",null,"seldon-system namespace \uc5d0 6\uac1c\uc758 seldon-core-analytics \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uadf8\ub77c\ud30c\ub098\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-install",src:n(1030).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d\uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Email or username : ",(0,r.kt)("inlineCode",{parentName:"li"},"admin")),(0,r.kt)("li",{parentName:"ul"},"Password : ",(0,r.kt)("inlineCode",{parentName:"li"},"password"))),(0,r.kt)("p",null,"\ub85c\uadf8\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-login",src:n(8234).Z,width:"3640",height:"2140"})),(0,r.kt)("p",null,"\uc88c\uce21\uc758 \ub300\uc2dc\ubcf4\ub4dc \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Manage")," 
\ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard-click",src:n(1502).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\uae30\ubcf8\uc801\uc778 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\uac00 \ud3ec\ud568\ub418\uc5b4\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard",src:n(4231).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"Seldon Core API Dashboard \uac00 \ubcf4\uc774\uace0, \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-dashboard",src:n(9803).Z,width:"5016",height:"2826"})),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},1502:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},4231:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},1030:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},8234:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},9803:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file diff --git a/assets/js/6b7916cd.a6c9252d.js b/assets/js/6b7916cd.e3e4eaac.js similarity index 99% rename from assets/js/6b7916cd.a6c9252d.js rename to assets/js/6b7916cd.e3e4eaac.js index cd4ebf11..b5b38f39 100644 --- a/assets/js/6b7916cd.a6c9252d.js +++ b/assets/js/6b7916cd.e3e4eaac.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8457],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var m=l.createContext({}),s=function(e){var n=l.useContext(m),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=s(e.components);return l.createElement(m.Provider,{value:n},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,m=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=s(t),u=a,f=c["".concat(m,".").concat(u)]||c[u]||d[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var m in 
n)hasOwnProperty.call(n,m)&&(i[m]=n[m]);i.originalType=e,i[c]="string"==typeof e?e:a,r[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>m,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"version-1.0/api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/docs/1.0/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/docs/1.0/api-deployment/seldon-children"}},m={},s=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API \uc0dd\uc131",id:"api-\uc0dd\uc131",level:2}],p={toc:s},c="wrapper";function d(e){let{components:n,...o}=e;return(0,a.kt)(c,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-mlflow"},"MLflow Component"),"\uc5d0\uc11c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 API\ub97c \uc0dd\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"initContainer\uac00 minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc73c\ub824\uba74 credentials\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\nminio\uc5d0 \uc811\uadfc\ud558\uae30 \uc704\ud55c credentials\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," \uc758 \uc785\ub825\uac12\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),"\uc785\ub2c8\ub2e4. 
\ub2e4\ub9cc secret\uc758 \uc785\ub825\uac12\uc740 \uc778\ucf54\ub529\ub41c \uac12\uc774\uc5ec\uc57c \ub418\uae30 \ub54c\ubb38\uc5d0 \uc2e4\uc81c\ub85c \uc785\ub825\ub418\ub294 \uac12\uc740 \ub2e4\uc74c\uc744 \uc218\ud589\ud6c4 \ub098\uc624\ub294 \uac12\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"data\uc5d0 \uc785\ub825\ub418\uc5b4\uc57c \ud558\ub294 \uac12\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uac12\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc744 \uc804\uccb4 \uac12\uc5d0 \ub300\ud574\uc11c \uc9c4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: bWluaW8="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: bWluaW8xMjM="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA="),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: ZmFsc2U=")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\ub294 yaml\ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"\uc774\uc81c Seldon Core\ub97c \uc0dd\uc131\ud558\ub294 yaml\ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - 
"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"\uc774 \uc804\uc5d0 \uc791\uc131\ud55c ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-fields"},"Seldon Fields"),"\uc640 \ub2ec\ub77c\uc9c4 \uc810\uc740 \ud06c\uac8c \ub450 \ubd80\ubd84\uc785\ub2c8\ub2e4.\ninitContainer\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"envFrom")," \ud544\ub4dc\uac00 \ucd94\uac00\ub418\uc5c8\uc73c\uba70 args\uc758 \uc8fc\uc18c\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc")," \ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"\uc55e\uc11c args\uc758 \uccab\ubc88\uc9f8 array\ub294 \uc6b0\ub9ac\uac00 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uacbd\ub85c\ub77c\uace0 \ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7fc mlflow\uc5d0 \uc800\uc7a5\ub41c \ubaa8\ub378\uc758 \uacbd\ub85c\ub294 \uc5b4\ub5bb\uac8c \uc54c \uc218 \uc788\uc744\uae4c\uc694?"),(0,a.kt)("p",null,"\ub2e4\uc2dc mlflow\uc5d0 \ub4e4\uc5b4\uac00\uc11c run\uc744 \ud074\ub9ad\ud558\uace0 \ubaa8\ub378\uc744 \ub204\ub974\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(8764).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"\uc774\ub807\uac8c \ud655\uc778\ub41c \uacbd\ub85c\ub97c \uc785\ub825\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ub370 \ud544\uc694\ud55c \ud658\uacbd\ubcc0\uc218\ub97c \uc785\ub825\ud574\uc8fc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4.\n\uc55e\uc11c \ub9cc\ub4e0 ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret"),"\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"api-\uc0dd\uc131"},"API \uc0dd\uc131"),(0,a.kt)("p",null,"\uc6b0\uc120 \uc704\uc5d0\uc11c \uc815\uc758\ud55c \uc2a4\ud399\uc744 yaml \ud30c\uc77c\ub85c \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n 
readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"seldon pod\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"\uc774\uc81c pod\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub730 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c API\ub97c \uc0dd\uc131\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"CLI\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ub41c API\uc5d0\ub294 \ub2e4\uc74c request\ub97c \ud1b5\ud574 \uc2e4\ud589\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}d.isMDXComponent=!0},8764:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8457],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var m=l.createContext({}),s=function(e){var n=l.useContext(m),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=s(e.components);return l.createElement(m.Provider,{value:n},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return 
l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,m=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=s(t),u=a,f=c["".concat(m,".").concat(u)]||c[u]||d[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var m in n)hasOwnProperty.call(n,m)&&(i[m]=n[m]);i.originalType=e,i[c]="string"==typeof e?e:a,r[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>m,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"version-1.0/api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/docs/1.0/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/docs/1.0/api-deployment/seldon-children"}},m={},s=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API \uc0dd\uc131",id:"api-\uc0dd\uc131",level:2}],p={toc:s},c="wrapper";function d(e){let{components:n,...o}=e;return(0,a.kt)(c,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-mlflow"},"MLflow Component"),"\uc5d0\uc11c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 API\ub97c \uc0dd\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"initContainer\uac00 minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc73c\ub824\uba74 credentials\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\nminio\uc5d0 \uc811\uadfc\ud558\uae30 \uc704\ud55c credentials\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," \uc758 \uc785\ub825\uac12\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),"\uc785\ub2c8\ub2e4. 
\ub2e4\ub9cc secret\uc758 \uc785\ub825\uac12\uc740 \uc778\ucf54\ub529\ub41c \uac12\uc774\uc5ec\uc57c \ub418\uae30 \ub54c\ubb38\uc5d0 \uc2e4\uc81c\ub85c \uc785\ub825\ub418\ub294 \uac12\uc740 \ub2e4\uc74c\uc744 \uc218\ud589\ud6c4 \ub098\uc624\ub294 \uac12\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"data\uc5d0 \uc785\ub825\ub418\uc5b4\uc57c \ud558\ub294 \uac12\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uac12\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc744 \uc804\uccb4 \uac12\uc5d0 \ub300\ud574\uc11c \uc9c4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: bWluaW8="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: bWluaW8xMjM="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA="),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: ZmFsc2U=")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\ub294 yaml\ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"\uc774\uc81c Seldon Core\ub97c \uc0dd\uc131\ud558\ub294 yaml\ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - 
"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"\uc774 \uc804\uc5d0 \uc791\uc131\ud55c ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/api-deployment/seldon-fields"},"Seldon Fields"),"\uc640 \ub2ec\ub77c\uc9c4 \uc810\uc740 \ud06c\uac8c \ub450 \ubd80\ubd84\uc785\ub2c8\ub2e4.\ninitContainer\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"envFrom")," \ud544\ub4dc\uac00 \ucd94\uac00\ub418\uc5c8\uc73c\uba70 args\uc758 \uc8fc\uc18c\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc")," \ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"\uc55e\uc11c args\uc758 \uccab\ubc88\uc9f8 array\ub294 \uc6b0\ub9ac\uac00 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uacbd\ub85c\ub77c\uace0 \ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7fc mlflow\uc5d0 \uc800\uc7a5\ub41c \ubaa8\ub378\uc758 \uacbd\ub85c\ub294 \uc5b4\ub5bb\uac8c \uc54c \uc218 \uc788\uc744\uae4c\uc694?"),(0,a.kt)("p",null,"\ub2e4\uc2dc mlflow\uc5d0 \ub4e4\uc5b4\uac00\uc11c run\uc744 \ud074\ub9ad\ud558\uace0 \ubaa8\ub378\uc744 \ub204\ub974\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(8764).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"\uc774\ub807\uac8c \ud655\uc778\ub41c \uacbd\ub85c\ub97c \uc785\ub825\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ub370 \ud544\uc694\ud55c \ud658\uacbd\ubcc0\uc218\ub97c \uc785\ub825\ud574\uc8fc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4.\n\uc55e\uc11c \ub9cc\ub4e0 ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret"),"\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"api-\uc0dd\uc131"},"API \uc0dd\uc131"),(0,a.kt)("p",null,"\uc6b0\uc120 \uc704\uc5d0\uc11c \uc815\uc758\ud55c \uc2a4\ud399\uc744 yaml \ud30c\uc77c\ub85c \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n 
readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"seldon pod\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"\uc774\uc81c pod\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub730 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c API\ub97c \uc0dd\uc131\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"CLI\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ub41c API\uc5d0\ub294 \ub2e4\uc74c request\ub97c \ud1b5\ud574 \uc2e4\ud589\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}d.isMDXComponent=!0},8764:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file diff --git a/assets/js/6d8a40f9.9b685874.js b/assets/js/6d8a40f9.e46891e9.js similarity index 99% rename from assets/js/6d8a40f9.9b685874.js rename to assets/js/6d8a40f9.e46891e9.js index d0b89f80..f7dbefc8 100644 --- a/assets/js/6d8a40f9.9b685874.js +++ b/assets/js/6d8a40f9.e46891e9.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4775],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var 
s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},u=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=p(t),m=r,k=d["".concat(s,".").concat(m)]||d[m]||c[m]||i;return t?a.createElement(k,o(o({ref:n},u),{},{components:t})):a.createElement(k,o({ref:n},u))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var a=t(7462),r=(t(7294),t(3905));const i={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,l={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/docs/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/docs/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/docs/setup-components/install-components-kf"}},s={},p=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. NVIDIA-Docker \uc124\uce58",id:"2-nvidia-docker-\uc124\uce58",level:2},{value:"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815",id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],u={toc:p},d="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,a.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc0f Kubeflow \ub4f1\uc5d0\uc11c GP \ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"1-install-nvidia-driver"},"1. 
Install NVIDIA Driver"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi")," \uc218\ud589 \uc2dc \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub41c\ub2e4\uba74 \uc774 \ub2e8\uacc4\ub294 \uc0dd\ub7b5\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),"\uc758 \ucd9c\ub825 \uacb0\uacfc\uac00 \uc704\uc640 \uac19\uc9c0 \uc54a\ub2e4\uba74 \uc7a5\ucc29\ub41c GPU\uc5d0 \ub9de\ub294 nvidia driver\ub97c \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d nvidia driver\uc758 \uc124\uce58\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\ub2e4\uba74 \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,r.kt)("h2",{id:"2-nvidia-docker-\uc124\uce58"},"2. NVIDIA-Docker \uc124\uce58"),(0,r.kt)("p",null,"NVIDIA-Docker\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. 
/etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uae30 \uc704\ud574, GPU\ub97c \uc0ac\uc6a9\ud558\ub294 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574\ubd05\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("h2",{id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815"},"3. 
NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker-CE\ub97c Default Container Runtime\uc73c\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\ub530\ub77c\uc11c, Docker Container \ub0b4\uc5d0\uc11c NVIDIA GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NVIDIA-Docker \ub97c Container Runtime \uc73c\ub85c \uc0ac\uc6a9\ud558\uc5ec pod\ub97c \uc0dd\uc131\ud560 \uc218 \uc788\ub3c4\ub85d Default Runtime\uc744 \uc218\uc815\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," \ud30c\uc77c\uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud30c\uc77c\uc774 \ubcc0\uacbd\ub41c \uac83\uc744 \ud655\uc778\ud55c \ud6c4, Docker\ub97c \uc7ac\uc2dc\uc791\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubcc0\uacbd \uc0ac\ud56d\uc774 \ubc18\uc601\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,r.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin daemonset\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin pod\uc774 RUNNING \uc0c1\ud0dc\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"node \uc815\ubcf4\uc5d0 gpu\uac00 \uc0ac\uc6a9\uac00\ub2a5\ud558\ub3c4\ub85d \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","(",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c \ud074\ub7ec\uc2a4\ud130\ub294 2\uac1c\uc758 GPU\uac00 \uc788\uc5b4\uc11c 2\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4.\n\ubcf8\uc778\uc758 \ud074\ub7ec\uc2a4\ud130\uc758 GPU \uac1c\uc218\uc640 \ub9de\ub294 \uc22b\uc790\uac00 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub429\ub2c8\ub2e4.)"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")))),(0,r.kt)("p",null,"\uc124\uc815\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0, GPU\uc758 value\uac00 ",(0,r.kt)("inlineCode",{parentName:"p"},"")," \uc73c\ub85c \ud45c\uc2dc\ub429\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4775],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},u=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var 
t=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=p(t),m=r,k=d["".concat(s,".").concat(m)]||d[m]||c[m]||i;return t?a.createElement(k,o(o({ref:n},u),{},{components:t})):a.createElement(k,o({ref:n},u))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var a=t(7462),r=(t(7294),t(3905));const i={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,l={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/docs/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/docs/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/docs/setup-components/install-components-kf"}},s={},p=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. NVIDIA-Docker \uc124\uce58",id:"2-nvidia-docker-\uc124\uce58",level:2},{value:"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815",id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],u={toc:p},d="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,a.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc0f Kubeflow \ub4f1\uc5d0\uc11c GPU \ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi")," \uc218\ud589 \uc2dc \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub41c\ub2e4\uba74 \uc774 \ub2e8\uacc4\ub294 \uc0dd\ub7b5\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. 
|\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),"\uc758 \ucd9c\ub825 \uacb0\uacfc\uac00 \uc704\uc640 \uac19\uc9c0 \uc54a\ub2e4\uba74 \uc7a5\ucc29\ub41c GPU\uc5d0 \ub9de\ub294 nvidia driver\ub97c \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d nvidia driver\uc758 \uc124\uce58\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\ub2e4\uba74 \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,r.kt)("h2",{id:"2-nvidia-docker-\uc124\uce58"},"2. NVIDIA-Docker \uc124\uce58"),(0,r.kt)("p",null,"NVIDIA-Docker\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uae30 \uc704\ud574, GPU\ub97c \uc0ac\uc6a9\ud558\ub294 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574\ubd05\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. 
|\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("h2",{id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815"},"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker-CE\ub97c Default Container Runtime\uc73c\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\ub530\ub77c\uc11c, Docker Container \ub0b4\uc5d0\uc11c NVIDIA GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NVIDIA-Docker \ub97c Container Runtime \uc73c\ub85c \uc0ac\uc6a9\ud558\uc5ec pod\ub97c \uc0dd\uc131\ud560 \uc218 \uc788\ub3c4\ub85d Default Runtime\uc744 \uc218\uc815\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," \ud30c\uc77c\uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud30c\uc77c\uc774 \ubcc0\uacbd\ub41c \uac83\uc744 \ud655\uc778\ud55c \ud6c4, Docker\ub97c \uc7ac\uc2dc\uc791\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubcc0\uacbd \uc0ac\ud56d\uc774 \ubc18\uc601\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,r.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin daemonset\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin pod\uc774 RUNNING \uc0c1\ud0dc\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"node \uc815\ubcf4\uc5d0 gpu\uac00 \uc0ac\uc6a9\uac00\ub2a5\ud558\ub3c4\ub85d \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","(",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c \ud074\ub7ec\uc2a4\ud130\ub294 2\uac1c\uc758 GPU\uac00 \uc788\uc5b4\uc11c 2\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4.\n\ubcf8\uc778\uc758 \ud074\ub7ec\uc2a4\ud130\uc758 GPU \uac1c\uc218\uc640 \ub9de\ub294 \uc22b\uc790\uac00 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub429\ub2c8\ub2e4.)"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")))),(0,r.kt)("p",null,"\uc124\uc815\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0, GPU\uc758 value\uac00 ",(0,r.kt)("inlineCode",{parentName:"p"},"")," \uc73c\ub85c \ud45c\uc2dc\ub429\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/6fb0e7c9.531715dd.js b/assets/js/6fb0e7c9.e4a94a4f.js similarity index 99% rename from assets/js/6fb0e7c9.531715dd.js rename to assets/js/6fb0e7c9.e4a94a4f.js index 1e656338..3cfcb9b0 100644 --- a/assets/js/6fb0e7c9.531715dd.js +++ b/assets/js/6fb0e7c9.e4a94a4f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5105],{3905:(t,e,n)=>{n.d(e,{Zo:()=>m,kt:()=>c});var r=n(7294);function a(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function l(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function o(t){for(var e=1;e=0||(a[n]=t[n]);return a}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(a[n]=t[n])}return a}var i=r.createContext({}),u=function(t){var e=r.useContext(i),n=e;return t&&(n="function"==typeof 
t?t(e):o(o({},e),t)),n},m=function(t){var e=u(t.components);return r.createElement(i.Provider,{value:e},t.children)},s="mdxType",d={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},k=r.forwardRef((function(t,e){var n=t.components,a=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),s=u(n),k=a,c=s["".concat(i,".").concat(k)]||s[k]||d[k]||l;return n?r.createElement(c,o(o({ref:e},m),{},{components:n})):r.createElement(c,o({ref:e},m))}));function c(t,e){var n=arguments,a=e&&e.mdxType;if("string"==typeof t||a){var l=n.length,o=new Array(l);o[0]=k;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[s]="string"==typeof t?t:a,o[1]=p;for(var u=2;u{n.r(e),n.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>p,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const l={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},o=void 0,p={unversionedId:"setup-kubernetes/intro",id:"version-1.0/setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/versioned_docs/version-1.0/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/docs/1.0/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/docs/1.0/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes"}},i={},u=[{value:"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30",id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30",level:2},{value:"\uad6c\uc131 \uc694\uc18c",id:"\uad6c\uc131-\uc694\uc18c",level:2},{value:"\ud074\ub7ec\uc2a4\ud130",id:"\ud074\ub7ec\uc2a4\ud130",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"\ud074\ub77c\uc774\uc5b8\ud2b8",id:"\ud074\ub77c\uc774\uc5b8\ud2b8",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],m={toc:u},s="wrapper";function d(t){let{components:e,...n}=t;return(0,a.kt)(s,(0,r.Z)({},m,n,{components:e,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30"},"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30"),(0,a.kt)("p",null,"MLOps\ub97c \uacf5\ubd80\ud558\ub294 \ub370 \uc788\uc5b4\uc11c \uac00\uc7a5 \ud070 \uc7a5\ubcbd\uc740 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud574\ubcf4\uace0 \uc0ac\uc6a9\ud574\ubcf4\uae30\uac00 \uc5b4\ub835\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4. 
AWS, GCP \ub4f1\uc758 \ud37c\ube14\ub9ad \ud074\ub77c\uc6b0\ub4dc \ud639\uc740 Weight & Bias, neptune.ai \ub4f1\uc758 \uc0c1\uc6a9 \ud234\uc744 \uc0ac\uc6a9\ud574\ubcf4\uae30\uc5d0\ub294 \uacfc\uae08\uc5d0 \ub300\ud55c \ubd80\ub2f4\uc774 \uc874\uc7ac\ud558\uace0, \ucc98\uc74c\ubd80\ud130 \ubaa8\ub4e0 \ud658\uacbd\uc744 \ud63c\uc790\uc11c \uad6c\uc131\ud558\uae30\uc5d0\ub294 \uc5b4\ub514\uc11c\ubd80\ud130 \uc2dc\uc791\ud574\uc57c \ud560\uc9c0 \ub9c9\ub9c9\ud558\uac8c \ub290\uaef4\uc9c8 \uc218\ubc16\uc5d0 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7f0 \uc774\uc720\ub4e4\ub85c MLOps\ub97c \uc120\ub73b \uc2dc\uc791\ud574\ubcf4\uc9c0 \ubabb\ud558\uc2dc\ub294 \ubd84\ub4e4\uc744 \uc704\ud574, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub9cc \uc900\ube44\ub418\uc5b4 \uc788\ub2e4\uba74 MLOps \uc2dc\uc2a4\ud15c\uc744 \ubc11\ubc14\ub2e5\ubd80\ud130 \uad6c\ucd95\ud558\uace0 \uc0ac\uc6a9\ud574 \ubcfc \uc218 \uc788\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0, \uac00\uc0c1\uba38\uc2e0\uc744 \ud65c\uc6a9\ud558\uc5ec \ud658\uacbd\uc744 \uad6c\uc131\ud558\uae30"),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"Windows \ud639\uc740 Intel Mac\uc744 \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"p"},"\ubaa8\ub450\uc758 MLops")," \uc2e4\uc2b5\uc744 \uc9c4\ud589 \uc911\uc778 \ubd84\ub4e4\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Virtual Box"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"VMware")," \ub4f1\uc758 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub97c \uc774\uc6a9\ud558\uc5ec \uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uad8c\uc7a5 \uc0ac\uc591\uc744 \ub9de\ucdb0 \uac00\uc0c1 \uba38\uc2e0\uc744 \uc0dd\uc131\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.\n\ub610\ud55c, M1 Mac\uc744 \uc0ac\uc6a9\ud558\uc2dc\ub294 \ubd84\ub4e4\uc740 \uc791\uc131\uc77c(2022\ub144 2\uc6d4) \uae30\uc900\uc73c\ub85c\ub294 Virtual Box, VMware \ub294 \uc774\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. (",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"M1 Apple Silicone Mac\uc5d0 \ucd5c\uc801\ud654\ub41c macOS \uc571 \uc9c0\uc6d0 \ud655\uc778\ud558\uae30"),")\n\ub530\ub77c\uc11c, \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \uc774\uc6a9\ud574 \uc2e4\uc2b5\ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c\uba74, ",(0,a.kt)("a",{parentName:"p",href:"https://mac.getutm.app/"},"UTM , Virtual machines for Mac"),"\uc744 \uc124\uce58\ud558\uc5ec \uac00\uc0c1 \uba38\uc2e0\uc744 \uc774\uc6a9\ud574\uc8fc\uc138\uc694.\n(\uc571\uc2a4\ud1a0\uc5b4\uc5d0\uc11c \uad6c\ub9e4\ud558\uc5ec \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 \uc77c\uc885\uc758 Donation \uac1c\ub150\uc758 \ube44\uc6a9 \uc9c0\ubd88\uc785\ub2c8\ub2e4. 
\ubb34\ub8cc \ubc84\uc804\uacfc \uc790\ub3d9 \uc5c5\ub370\uc774\ud2b8 \uc815\ub3c4\uc758 \ucc28\uc774\uac00 \uc788\uc5b4, \ubb34\ub8cc\ubc84\uc804\uc744 \uc0ac\uc6a9\ud574\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4.)\n\ud574\ub2f9 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"Ubuntu 20.04.3 LTS")," \uc2e4\uc2b5 \uc6b4\uc601\uccb4\uc81c\ub97c \uc9c0\uc6d0\ud558\uace0 \uc788\uc5b4, M1 Mac\uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc218\ud589\ud558\ub294 \uac83\uc744 \uac00\ub2a5\ud558\uac8c \ud569\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\ud558\uc9c0\ub9cc ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \uc694\uc18c\ub4e4\uc744 \ubaa8\ub450 \uc0ac\uc6a9\ud574\ubcfc \uc218\ub294 \uc5c6\uae30\uc5d0, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4\ub9cc\uc744 \uc124\uce58\ud55c \ub4a4, \uc11c\ub85c \uc5f0\ub3d9\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 \ubd80\ubd84\uc744 \uc8fc\ub85c \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uc624\ud508\uc18c\uc2a4\uac00 \ud45c\uc900\uc744 \uc758\ubbf8\ud558\ub294 \uac83\uc740 \uc544\ub2c8\uba70, \uc5ec\ub7ec\ubd84\uc758 \uc0c1\ud669\uc5d0 \ub9de\uac8c \uc801\uc808\ud55c \ud234\uc744 \ucde8\uc0ac\uc120\ud0dd\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\uad6c\uc131-\uc694\uc18c"},"\uad6c\uc131 \uc694\uc18c"),(0,a.kt)("p",null,"\uc774 \uae00\uc5d0\uc11c \ub9cc\ub4e4\uc5b4 \ubcfc MLOps \uc2dc\uc2a4\ud15c\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uacfc \uac01 \ubc84\uc804\uc740 \uc544\ub798\uc640 \uac19\uc740 \ud658\uacbd\uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6d0\ud65c\ud55c \ud658\uacbd\uc5d0\uc11c \ud14c\uc2a4\ud2b8\ud558\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"\uc2f1\uae00 \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130 (\ud639\uc740 \ud074\ub7ec\uc2a4\ud130)")," \uc640 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub97c \ubd84\ub9ac\ud558\uc5ec \uc124\uba85\ud574 \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub294 \ub178\ud2b8\ubd81 \ud639\uc740 \ud074\ub7ec\uc2a4\ud130\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \uc678\uc758 \ud074\ub77c\uc774\uc5b8\ud2b8\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub2e4\ub978 \ub370\uc2a4\ud06c\ud1b1\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ub450 \ub300\uc758 \uba38\uc2e0\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub2e4\uba74 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \ub3d9\uc2dc\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud558\uc154\ub3c4 \uad1c\ucc2e\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"\ud074\ub7ec\uc2a4\ud130"},"\ud074\ub7ec\uc2a4\ud130"),(0,a.kt)("h4",{id:"1-software"},"1. 
Software"),(0,a.kt)("p",null,"\uc544\ub798\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc124\uce58\ud574\uc57c \ud560 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA-Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. Helm Chart"),(0,a.kt)("p",null,"\uc544\ub798\ub294 Helm\uc744 \uc774\uc6a9\ud574 \uc124\uce58\ub418\uc5b4\uc57c \ud560 \uc368\ub4dc\ud30c\ud2f0 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"\ud074\ub77c\uc774\uc5b8\ud2b8"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),(0,a.kt)("p",null,"\ud074\ub77c\uc774\uc5b8\ud2b8\ub294 MacOS (Intel CPU), Ubuntu 20.04 \uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"\ubaa8\ub450\uc758 MLOps\ub97c \uc124\uce58\ud560 \ud074\ub7ec\uc2a4\ud130\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0ac\uc591\uc744 \ub9cc\uc871\uc2dc\ud0a4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 Kubernetes \ubc0f Kubeflow \uc758 \uad8c\uc7a5 \uc0ac\uc591\uc5d0 \uc758\uc874\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU 
: 6 core"),(0,a.kt)("li",{parentName:"ul"},"RAM : 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK : 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU : NVIDIA GPU (Optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5105],{3905:(t,e,n)=>{n.d(e,{Zo:()=>m,kt:()=>c});var r=n(7294);function a(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function l(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function o(t){for(var e=1;e=0||(a[n]=t[n]);return a}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(a[n]=t[n])}return a}var i=r.createContext({}),u=function(t){var e=r.useContext(i),n=e;return t&&(n="function"==typeof t?t(e):o(o({},e),t)),n},m=function(t){var e=u(t.components);return r.createElement(i.Provider,{value:e},t.children)},s="mdxType",d={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},k=r.forwardRef((function(t,e){var n=t.components,a=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),s=u(n),k=a,c=s["".concat(i,".").concat(k)]||s[k]||d[k]||l;return n?r.createElement(c,o(o({ref:e},m),{},{components:n})):r.createElement(c,o({ref:e},m))}));function c(t,e){var n=arguments,a=e&&e.mdxType;if("string"==typeof t||a){var l=n.length,o=new Array(l);o[0]=k;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[s]="string"==typeof t?t:a,o[1]=p;for(var u=2;u{n.r(e),n.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>p,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const l={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},o=void 0,p={unversionedId:"setup-kubernetes/intro",id:"version-1.0/setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/versioned_docs/version-1.0/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/docs/1.0/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/docs/1.0/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes"}},i={},u=[{value:"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30",id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30",level:2},{value:"\uad6c\uc131 \uc694\uc18c",id:"\uad6c\uc131-\uc694\uc18c",level:2},{value:"\ud074\ub7ec\uc2a4\ud130",id:"\ud074\ub7ec\uc2a4\ud130",level:3},{value:"1. 
Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"\ud074\ub77c\uc774\uc5b8\ud2b8",id:"\ud074\ub77c\uc774\uc5b8\ud2b8",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],m={toc:u},s="wrapper";function d(t){let{components:e,...n}=t;return(0,a.kt)(s,(0,r.Z)({},m,n,{components:e,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30"},"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30"),(0,a.kt)("p",null,"MLOps\ub97c \uacf5\ubd80\ud558\ub294 \ub370 \uc788\uc5b4\uc11c \uac00\uc7a5 \ud070 \uc7a5\ubcbd\uc740 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud574\ubcf4\uace0 \uc0ac\uc6a9\ud574\ubcf4\uae30\uac00 \uc5b4\ub835\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4. AWS, GCP \ub4f1\uc758 \ud37c\ube14\ub9ad \ud074\ub77c\uc6b0\ub4dc \ud639\uc740 Weight & Bias, neptune.ai \ub4f1\uc758 \uc0c1\uc6a9 \ud234\uc744 \uc0ac\uc6a9\ud574\ubcf4\uae30\uc5d0\ub294 \uacfc\uae08\uc5d0 \ub300\ud55c \ubd80\ub2f4\uc774 \uc874\uc7ac\ud558\uace0, \ucc98\uc74c\ubd80\ud130 \ubaa8\ub4e0 \ud658\uacbd\uc744 \ud63c\uc790\uc11c \uad6c\uc131\ud558\uae30\uc5d0\ub294 \uc5b4\ub514\uc11c\ubd80\ud130 \uc2dc\uc791\ud574\uc57c \ud560\uc9c0 \ub9c9\ub9c9\ud558\uac8c \ub290\uaef4\uc9c8 \uc218\ubc16\uc5d0 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7f0 \uc774\uc720\ub4e4\ub85c MLOps\ub97c \uc120\ub73b \uc2dc\uc791\ud574\ubcf4\uc9c0 \ubabb\ud558\uc2dc\ub294 \ubd84\ub4e4\uc744 \uc704\ud574, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub9cc \uc900\ube44\ub418\uc5b4 \uc788\ub2e4\uba74 MLOps \uc2dc\uc2a4\ud15c\uc744 \ubc11\ubc14\ub2e5\ubd80\ud130 \uad6c\ucd95\ud558\uace0 \uc0ac\uc6a9\ud574 \ubcfc \uc218 \uc788\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0, \uac00\uc0c1\uba38\uc2e0\uc744 \ud65c\uc6a9\ud558\uc5ec \ud658\uacbd\uc744 \uad6c\uc131\ud558\uae30"),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"Windows \ud639\uc740 Intel Mac\uc744 \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"p"},"\ubaa8\ub450\uc758 MLops")," \uc2e4\uc2b5\uc744 \uc9c4\ud589 \uc911\uc778 \ubd84\ub4e4\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Virtual Box"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"VMware")," \ub4f1\uc758 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub97c \uc774\uc6a9\ud558\uc5ec \uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uad8c\uc7a5 \uc0ac\uc591\uc744 \ub9de\ucdb0 \uac00\uc0c1 \uba38\uc2e0\uc744 \uc0dd\uc131\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.\n\ub610\ud55c, M1 Mac\uc744 \uc0ac\uc6a9\ud558\uc2dc\ub294 \ubd84\ub4e4\uc740 \uc791\uc131\uc77c(2022\ub144 2\uc6d4) \uae30\uc900\uc73c\ub85c\ub294 Virtual Box, VMware \ub294 \uc774\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
(",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"M1 Apple Silicone Mac\uc5d0 \ucd5c\uc801\ud654\ub41c macOS \uc571 \uc9c0\uc6d0 \ud655\uc778\ud558\uae30"),")\n\ub530\ub77c\uc11c, \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \uc774\uc6a9\ud574 \uc2e4\uc2b5\ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c\uba74, ",(0,a.kt)("a",{parentName:"p",href:"https://mac.getutm.app/"},"UTM , Virtual machines for Mac"),"\uc744 \uc124\uce58\ud558\uc5ec \uac00\uc0c1 \uba38\uc2e0\uc744 \uc774\uc6a9\ud574\uc8fc\uc138\uc694.\n(\uc571\uc2a4\ud1a0\uc5b4\uc5d0\uc11c \uad6c\ub9e4\ud558\uc5ec \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 \uc77c\uc885\uc758 Donation \uac1c\ub150\uc758 \ube44\uc6a9 \uc9c0\ubd88\uc785\ub2c8\ub2e4. \ubb34\ub8cc \ubc84\uc804\uacfc \uc790\ub3d9 \uc5c5\ub370\uc774\ud2b8 \uc815\ub3c4\uc758 \ucc28\uc774\uac00 \uc788\uc5b4, \ubb34\ub8cc\ubc84\uc804\uc744 \uc0ac\uc6a9\ud574\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4.)\n\ud574\ub2f9 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"Ubuntu 20.04.3 LTS")," \uc2e4\uc2b5 \uc6b4\uc601\uccb4\uc81c\ub97c \uc9c0\uc6d0\ud558\uace0 \uc788\uc5b4, M1 Mac\uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc218\ud589\ud558\ub294 \uac83\uc744 \uac00\ub2a5\ud558\uac8c \ud569\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\ud558\uc9c0\ub9cc ",(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \uc694\uc18c\ub4e4\uc744 \ubaa8\ub450 \uc0ac\uc6a9\ud574\ubcfc \uc218\ub294 \uc5c6\uae30\uc5d0, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4\ub9cc\uc744 \uc124\uce58\ud55c \ub4a4, \uc11c\ub85c \uc5f0\ub3d9\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 \ubd80\ubd84\uc744 \uc8fc\ub85c \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uc624\ud508\uc18c\uc2a4\uac00 \ud45c\uc900\uc744 \uc758\ubbf8\ud558\ub294 \uac83\uc740 \uc544\ub2c8\uba70, \uc5ec\ub7ec\ubd84\uc758 \uc0c1\ud669\uc5d0 \ub9de\uac8c \uc801\uc808\ud55c \ud234\uc744 \ucde8\uc0ac\uc120\ud0dd\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\uad6c\uc131-\uc694\uc18c"},"\uad6c\uc131 \uc694\uc18c"),(0,a.kt)("p",null,"\uc774 \uae00\uc5d0\uc11c \ub9cc\ub4e4\uc5b4 \ubcfc MLOps \uc2dc\uc2a4\ud15c\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uacfc \uac01 \ubc84\uc804\uc740 \uc544\ub798\uc640 \uac19\uc740 \ud658\uacbd\uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6d0\ud65c\ud55c \ud658\uacbd\uc5d0\uc11c \ud14c\uc2a4\ud2b8\ud558\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"\uc2f1\uae00 \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130 (\ud639\uc740 \ud074\ub7ec\uc2a4\ud130)")," \uc640 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub97c \ubd84\ub9ac\ud558\uc5ec \uc124\uba85\ud574 \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub294 \ub178\ud2b8\ubd81 \ud639\uc740 \ud074\ub7ec\uc2a4\ud130\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 
\ub370\uc2a4\ud06c\ud1b1 \uc678\uc758 \ud074\ub77c\uc774\uc5b8\ud2b8\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub2e4\ub978 \ub370\uc2a4\ud06c\ud1b1\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ub450 \ub300\uc758 \uba38\uc2e0\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub2e4\uba74 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \ub3d9\uc2dc\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud558\uc154\ub3c4 \uad1c\ucc2e\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"\ud074\ub7ec\uc2a4\ud130"},"\ud074\ub7ec\uc2a4\ud130"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"\uc544\ub798\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc124\uce58\ud574\uc57c \ud560 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA-Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"\uc544\ub798\ub294 Helm\uc744 \uc774\uc6a9\ud574 \uc124\uce58\ub418\uc5b4\uc57c \ud560 \uc368\ub4dc\ud30c\ud2f0 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"\ud074\ub77c\uc774\uc5b8\ud2b8"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),(0,a.kt)("p",null,"\ud074\ub77c\uc774\uc5b8\ud2b8\ub294 MacOS (Intel CPU), Ubuntu 20.04 \uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"\ubaa8\ub450\uc758 MLOps\ub97c \uc124\uce58\ud560 \ud074\ub7ec\uc2a4\ud130\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0ac\uc591\uc744 \ub9cc\uc871\uc2dc\ud0a4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 Kubernetes \ubc0f Kubeflow \uc758 \uad8c\uc7a5 \uc0ac\uc591\uc5d0 \uc758\uc874\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU : 6 core"),(0,a.kt)("li",{parentName:"ul"},"RAM : 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK : 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU : NVIDIA GPU (Optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/751a793c.78750b3e.js b/assets/js/751a793c.f474394d.js similarity index 99% rename from assets/js/751a793c.78750b3e.js rename to assets/js/751a793c.f474394d.js index 52406783..c63339bc 100644 --- a/assets/js/751a793c.78750b3e.js +++ b/assets/js/751a793c.f474394d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8932],{3905:(e,n,a)=>{a.d(n,{Zo:()=>u,kt:()=>m});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=t.createContext({}),s=function(e){var n=t.useContext(i),a=n;return e&&(a="function"==typeof 
e?e(n):o(o({},n),e)),a},u=function(e){var n=s(e.components);return t.createElement(i.Provider,{value:n},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,i=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),c=s(a),d=r,m=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return a?t.createElement(m,o(o({ref:n},u),{},{components:a})):t.createElement(m,o({ref:n},u))}));function m(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=d;var p={};for(var i in n)hasOwnProperty.call(n,i)&&(p[i]=n[i]);p.originalType=e,p[c]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,p={unversionedId:"prerequisites/docker/command",id:"prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/docs/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/docs/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/command.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/docs/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/docs/prerequisites/docker/images"}},i={},s=[{value:"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"9. Docker rm",id:"9-docker-rm",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:s},c="wrapper";function k(e){let{components:n,...a}=e;return(0,r.kt)(c,(0,t.Z)({},u,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"1. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," sudo \uc5c6\uc774 \uc0ac\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 \uc544\ub798 \uc0ac\uc774\ud2b8\ub97c \ucc38\uace0\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. Docker Pull"),(0,r.kt)("p",null,"docker image registry(\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub294 \uc800\uc7a5\uc18c)\ub85c\ubd80\ud130 Docker image \ub97c \ub85c\uceec\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc544\ub798 \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 docker pull\uc5d0\uc11c \uc0ac\uc6a9 \uac00\ub2a5\ud55c argument\ub4e4\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc54c \uc218 \uc788\ub294 \uac83\uc740 \ubc14\ub85c docker pull\uc740 \ub450 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"help\uc5d0\uc11c \ub098\uc628 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a"),", -",(0,r.kt)("inlineCode",{parentName:"p"},"q")," \uc635\uc158\uc744 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NAME \uc55e\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc9c1\uc811 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," \uc774\ubbf8\uc9c0\ub97c pull \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud574\uc11d\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu")," \ub77c\ub294 \uc774\ub984\uc744 \uac00\uc9c4 \uc774\ubbf8\uc9c0 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," \ud0dc\uadf8\uac00 \ub2ec\ub824\uc788\ub294 
\uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc624\ub77c\ub294 \ub73b\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d, \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uba74 ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," \ub77c\ub294 \uc774\ub984\uc758 registry \uc5d0\uc11c ubuntu:18.04 \ub77c\ub294 image \ub97c \uc5ec\ub7ec\ubd84\uc758 \ub178\ud2b8\ubd81\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uac8c\ub429\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ucc38\uace0\uc0ac\ud56d",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\ud6c4 ",(0,r.kt)("a",{parentName:"li",href:"http://docker.io"},"docker.io")," \ub098 public \ud55c docker hub \uc640 \uac19\uc740 registry \ub300\uc2e0\uc5d0, \ud2b9\uc815 ",(0,r.kt)("strong",{parentName:"li"},"private")," \ud55c registry \uc5d0\uc11c docker image \ub97c \uac00\uc838\uc640\uc57c \ud558\ub294 \uacbd\uc6b0\uc5d0\ub294, ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," \uc744 \ud1b5\ud574\uc11c \ud2b9\uc815 registry \ub97c \ubc14\ub77c\ubcf4\ub3c4\ub85d \ud55c \ub4a4, docker pull \uc744 \uc218\ud589\ud558\ub294 \ud615\ud0dc\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ud639\uc740 insecure registry \ub97c \uc124\uc815\ud558\ub294 ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"\ubc29\uc548"),"\ub3c4 \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\ud3d0\uc1c4\ub9dd\uc5d0\uc11c docker image \ub97c ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," \ud30c\uc77c\uacfc \uac19\uc740 \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save")),", ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," \uc640 \uac19\uc740 \uba85\ub839\uc5b4\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0 \uc874\uc7ac\ud558\ub294 docker image \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"docker images\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \ub3c4\ucee4\ub97c \ucd5c\ucd08 \uc124\uce58 \ud6c4 \uc774 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"\uc904 \uc218 \uc788\ub294 argument\uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"-q"),"\ub97c \uc0ac\uc6a9\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," \ub9cc \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. 
Docker ps"),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"docker ps\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\uac00 \uc5c6\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \uc2e4\ud589\ub418\ub294 \ucee8\ud14c\uc774\ub108\uac00 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"docker run\uc744 \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc6b0\ub9ac\uac00 \ud655\uc778\ud574\uc57c \ud558\ub294 \uac83\uc740 \ubc14\ub85c docker run\uc740 \uc138 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"\uc9c1\uc811 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it")," : ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," \uc635\uc158 + ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," \uc635\uc158",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"container \ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 interactive \ud55c terminal \ub85c \uc811\uc18d\uc2dc\ucf1c\uc8fc\ub294 \uc635\uc158"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name")," : name",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 id \ub300\uc2e0, \uad6c\ubd84\ud558\uae30 \uc27d\ub3c4\ub85d \uc9c0\uc815\ud574\uc8fc\ub294 \uc774\ub984"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 \uc2e4\ud589\ud560 \ucee4\ub9e8\ub4dc\ub85c, ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," \ub294 bash \uc258\uc744 \uc5ec\ub294 \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ucee8\ud14c\uc774\ub108\ub97c \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774 \uc81c \uc55e\uc11c \ubc30\uc6e0\ub358 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\uc2e4\ud589\ub418\uace0 \uc788\ub294 \ucee8\ud14c\uc774\ub108\uac00 \ub098\uc628\ub2e4\uace0 \ud588\uc9c0\ub9cc \uc5b4\uc9f8\uc11c\uc778\uc9c0 \ubc29\uae08 \uc2e4\ud589\ud55c \ucee8\ud14c\uc774\ub108\uac00 \ubcf4\uc774\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uadf8 \uc774\uc720\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub294 \uae30\ubcf8\uac12\uc73c\ub85c \ud604\uc7ac 
\uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ubcf4\uc5ec\uc8fc\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \ubcf4\uace0 \uc2f6\ub2e4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," \uc635\uc158\uc744 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\ub3c4 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. Docker exec"),(0,r.kt)("p",null,"Docker \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uba85\ub839\uc744 \ub0b4\ub9ac\uac70\ub098, \ub0b4\ubd80\ub85c \uc811\uc18d\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," \uc635\uc158\uc740 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \uc2e4\ud589\uc2dc\ucf1c\uc11c, \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \uc811\uc18d \uc885\ub8cc\ub97c \ud558\ub354\ub77c\ub3c4, \uacc4\uc18d \uc2e4\ud589 \uc911\uc774 \ub418\ub3c4\ub85d \ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589\uc911\uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589 \uc911\uc784\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \uc2e4\ud589\uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc5d0 \uc811\uc18d\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"\uc774 \uc804\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run"),"\uacfc \ub3d9\uc77c\ud558\uac8c container \ub0b4\ubd80\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"\uc744 \ud1b5\ud574 \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. 
Docker logs"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc758 log\ub97c \ud655\uc778\ud558\ub294 \ucee4\ub9e8\ub4dc \uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c test \ub77c\ub294 \uc774\ub984\uc758 busybox \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub85c \uc2e4\ud589\ud558\uc5ec, 1\ucd08\uc5d0 \ud55c \ubc88\uc529 \ud604\uc7ac \uc2dc\uac04\uc744 \ucd9c\ub825\ud558\ub3c4\ub85d \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 log\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774\ub807\uac8c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc5ec\ud0dc\uae4c\uc9c0 \ucc0d\ud78c log \ubc16\uc5d0 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," \uc635\uc158\uc744 \uc774\uc6a9\ud574 \uacc4\uc18d watch \ud558\uba70 \ucd9c\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"\uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc911\ub2e8\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop")," \uc744 \ud1b5\ud574 \ub3c4\ucee4\ub97c \uc815\uc9c0\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ub2e4\uc2dc \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"\uc704\uc758 \uacb0\uacfc\uc640 \ube44\uad50\ud588\uc744 \ub54c demo2 \ucee8\ud14c\uc774\ub108\uac00 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\uc5d0\uc11c \uc0ac\ub77c\uc9c4 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub3c4 \uc815\uc9c0\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("h2",{id:"9-docker-rm"},"9. Docker rm"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc885\ub8cc\uac00 \ub41c \uc0c1\ud0dc\ub85c \uc788\uc2b5\ub2c8\ub2e4. 
\uadf8\ub798\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),"\ub97c \ud1b5\ud574\uc11c \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub3c4 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc65c \uc9c0\uc6cc\uc57c \ud560\uae4c\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\uc885\ub8cc\ub418\uc5b4 \uc788\ub294 \ub3c4\ucee4\uc5d0\ub294 \uc774\uc804\uc5d0 \uc0ac\uc6a9\ud55c \ub370\uc774\ud130\uac00 \uc544\uc9c1 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0 \ub0a8\uc544\uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub798\uc11c restart \ub4f1\uc744 \ud1b5\ud574\uc11c \ucee8\ud14c\uc774\ub108\ub97c \uc7ac\uc2dc\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc774 \uacfc\uc815\uc5d0\uc11c disk\ub97c \uc0ac\uc6a9\ud558\uac8c \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub798\uc11c \uc644\uc804\ud788 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \ucee8\ud14c\uc774\ub108\ub97c \uc9c0\uc6b0\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," \uba85\ub839\uc5b4\ub97c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud604\uc7ac \ucee8\ud14c\uc774\ub108\ub4e4\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 3\uac1c\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"demo3")," \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac1c\ub85c \uc904\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \uc0ad\uc81c\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. 
Docker rmi"),(0,r.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud604\uc7ac \uc5b4\ub5a4 \uc774\ubbf8\uc9c0\ub4e4\uc774 \ub85c\uceec\uc5d0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"\ub2e4\uc2dc ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images"),"\ub97c \uce60 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8932],{3905:(e,n,a)=>{a.d(n,{Zo:()=>u,kt:()=>m});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function o(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=t.createContext({}),s=function(e){var n=t.useContext(i),a=n;return e&&(a="function"==typeof e?e(n):o(o({},n),e)),a},u=function(e){var n=s(e.components);return t.createElement(i.Provider,{value:n},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,i=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),c=s(a),d=r,m=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return a?t.createElement(m,o(o({ref:n},u),{},{components:a})):t.createElement(m,o({ref:n},u))}));function m(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,o=new Array(l);o[0]=d;var p={};for(var i in n)hasOwnProperty.call(n,i)&&(p[i]=n[i]);p.originalType=e,p[c]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>k,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var 
t=a(7462),r=(a(7294),a(3905));const l={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,p={unversionedId:"prerequisites/docker/command",id:"prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/docs/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/docs/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/command.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/docs/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/docs/prerequisites/docker/images"}},i={},s=[{value:"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"9. Docker rm",id:"9-docker-rm",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:s},c="wrapper";function k(e){let{components:n,...a}=e;return(0,r.kt)(c,(0,t.Z)({},u,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"1. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," sudo \uc5c6\uc774 \uc0ac\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 \uc544\ub798 \uc0ac\uc774\ud2b8\ub97c \ucc38\uace0\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. 
Docker Pull"),(0,r.kt)("p",null,"docker image registry(\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub294 \uc800\uc7a5\uc18c)\ub85c\ubd80\ud130 Docker image \ub97c \ub85c\uceec\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc544\ub798 \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 docker pull\uc5d0\uc11c \uc0ac\uc6a9 \uac00\ub2a5\ud55c argument\ub4e4\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc54c \uc218 \uc788\ub294 \uac83\uc740 \ubc14\ub85c docker pull\uc740 \ub450 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"help\uc5d0\uc11c \ub098\uc628 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a"),", -",(0,r.kt)("inlineCode",{parentName:"p"},"q")," \uc635\uc158\uc744 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NAME \uc55e\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc9c1\uc811 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," \uc774\ubbf8\uc9c0\ub97c pull \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud574\uc11d\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu")," \ub77c\ub294 \uc774\ub984\uc744 \uac00\uc9c4 \uc774\ubbf8\uc9c0 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," \ud0dc\uadf8\uac00 \ub2ec\ub824\uc788\ub294 \uc774\ubbf8\uc9c0\ub97c \uac00\uc838\uc624\ub77c\ub294 \ub73b\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d, \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uba74 ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," \ub77c\ub294 \uc774\ub984\uc758 registry \uc5d0\uc11c ubuntu:18.04 \ub77c\ub294 image \ub97c \uc5ec\ub7ec\ubd84\uc758 \ub178\ud2b8\ubd81\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc 
\ubc1b\uac8c\ub429\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ucc38\uace0\uc0ac\ud56d",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\ud6c4 ",(0,r.kt)("a",{parentName:"li",href:"http://docker.io"},"docker.io")," \ub098 public \ud55c docker hub \uc640 \uac19\uc740 registry \ub300\uc2e0\uc5d0, \ud2b9\uc815 ",(0,r.kt)("strong",{parentName:"li"},"private")," \ud55c registry \uc5d0\uc11c docker image \ub97c \uac00\uc838\uc640\uc57c \ud558\ub294 \uacbd\uc6b0\uc5d0\ub294, ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," \uc744 \ud1b5\ud574\uc11c \ud2b9\uc815 registry \ub97c \ubc14\ub77c\ubcf4\ub3c4\ub85d \ud55c \ub4a4, docker pull \uc744 \uc218\ud589\ud558\ub294 \ud615\ud0dc\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ud639\uc740 insecure registry \ub97c \uc124\uc815\ud558\ub294 ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"\ubc29\uc548"),"\ub3c4 \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\ud3d0\uc1c4\ub9dd\uc5d0\uc11c docker image \ub97c ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," \ud30c\uc77c\uacfc \uac19\uc740 \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uace0 \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save")),", ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," \uc640 \uac19\uc740 \uba85\ub839\uc5b4\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0 \uc874\uc7ac\ud558\ub294 docker image \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"docker images\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \ub3c4\ucee4\ub97c \ucd5c\ucd08 \uc124\uce58 \ud6c4 \uc774 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"\uc904 \uc218 \uc788\ub294 argument\uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"-q"),"\ub97c \uc0ac\uc6a9\ud558\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," \ub9cc \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. 
Docker ps"),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub9ac\uc2a4\ud2b8\ub97c \ucd9c\ub825\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"docker ps\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 argument\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc9c1\uc811 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"\ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\uac00 \uc5c6\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\ub9cc\uc57d \uc2e4\ud589\ub418\ub294 \ucee8\ud14c\uc774\ub108\uac00 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"docker run\uc744 \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c \uc6b0\ub9ac\uac00 \ud655\uc778\ud574\uc57c \ud558\ub294 \uac83\uc740 \ubc14\ub85c docker run\uc740 \uc138 \uac1c \ud0c0\uc785\uc758 argument\ub97c \ubc1b\ub294\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"\uc9c1\uc811 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it")," : ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," \uc635\uc158 + ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," \uc635\uc158",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"container \ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 interactive \ud55c terminal \ub85c \uc811\uc18d\uc2dc\ucf1c\uc8fc\ub294 \uc635\uc158"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name")," : name",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 id \ub300\uc2e0, \uad6c\ubd84\ud558\uae30 \uc27d\ub3c4\ub85d \uc9c0\uc815\ud574\uc8fc\ub294 \uc774\ub984"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0b4\uacfc \ub3d9\uc2dc\uc5d0 \uc2e4\ud589\ud560 \ucee4\ub9e8\ub4dc\ub85c, ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," \ub294 bash \uc258\uc744 \uc5ec\ub294 \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")))),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ucee8\ud14c\uc774\ub108\ub97c \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774 \uc81c \uc55e\uc11c \ubc30\uc6e0\ub358 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"\uc2e4\ud589\ub418\uace0 \uc788\ub294 \ucee8\ud14c\uc774\ub108\uac00 \ub098\uc628\ub2e4\uace0 \ud588\uc9c0\ub9cc \uc5b4\uc9f8\uc11c\uc778\uc9c0 \ubc29\uae08 \uc2e4\ud589\ud55c \ucee8\ud14c\uc774\ub108\uac00 \ubcf4\uc774\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uadf8 \uc774\uc720\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub294 \uae30\ubcf8\uac12\uc73c\ub85c \ud604\uc7ac 
\uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ubcf4\uc5ec\uc8fc\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \ubcf4\uace0 \uc2f6\ub2e4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," \uc635\uc158\uc744 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\ub3c4 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. Docker exec"),(0,r.kt)("p",null,"Docker \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uba85\ub839\uc744 \ub0b4\ub9ac\uac70\ub098, \ub0b4\ubd80\ub85c \uc811\uc18d\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"\uc5ec\uae30\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," \uc635\uc158\uc740 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \uc2e4\ud589\uc2dc\ucf1c\uc11c, \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \uc811\uc18d \uc885\ub8cc\ub97c \ud558\ub354\ub77c\ub3c4, \uacc4\uc18d \uc2e4\ud589 \uc911\uc774 \ub418\ub3c4\ub85d \ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589\uc911\uc778\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589 \uc911\uc784\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \uc2e4\ud589\uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc5d0 \uc811\uc18d\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"\uc774 \uc804\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run"),"\uacfc \ub3d9\uc77c\ud558\uac8c container \ub0b4\ubd80\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"\uc744 \ud1b5\ud574 \uc885\ub8cc\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. 
Docker logs"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uc758 log\ub97c \ud655\uc778\ud558\ub294 \ucee4\ub9e8\ub4dc \uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c test \ub77c\ub294 \uc774\ub984\uc758 busybox \ucee8\ud14c\uc774\ub108\ub97c \ubc31\uadf8\ub77c\uc6b4\ub4dc\uc5d0\uc11c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub85c \uc2e4\ud589\ud558\uc5ec, 1\ucd08\uc5d0 \ud55c \ubc88\uc529 \ud604\uc7ac \uc2dc\uac04\uc744 \ucd9c\ub825\ud558\ub3c4\ub85d \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 log\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \uc544\ub798\uc640 \ube44\uc2b7\ud558\uac8c \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774\ub807\uac8c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc5ec\ud0dc\uae4c\uc9c0 \ucc0d\ud78c log \ubc16\uc5d0 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," \uc635\uc158\uc744 \uc774\uc6a9\ud574 \uacc4\uc18d watch \ud558\uba70 \ucd9c\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"\uc2e4\ud589 \uc911\uc778 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc911\ub2e8\uc2dc\ud0a4\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ud1b5\ud574 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop")," \uc744 \ud1b5\ud574 \ub3c4\ucee4\ub97c \uc815\uc9c0\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"\uc2e4\ud589 \ud6c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),"\ub97c \ub2e4\uc2dc \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"\uc704\uc758 \uacb0\uacfc\uc640 \ube44\uad50\ud588\uc744 \ub54c demo2 \ucee8\ud14c\uc774\ub108\uac00 \ud604\uc7ac \uc2e4\ud589 \uc911\uc778 \ucee8\ud14c\uc774\ub108 \ubaa9\ub85d\uc5d0\uc11c \uc0ac\ub77c\uc9c4 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub3c4 \uc815\uc9c0\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("h2",{id:"9-docker-rm"},"9. Docker rm"),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc885\ub8cc\uac00 \ub41c \uc0c1\ud0dc\ub85c \uc788\uc2b5\ub2c8\ub2e4. 
\uadf8\ub798\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),"\ub97c \ud1b5\ud574\uc11c \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub3c4 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc885\ub8cc\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc65c \uc9c0\uc6cc\uc57c \ud560\uae4c\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\uc885\ub8cc\ub418\uc5b4 \uc788\ub294 \ub3c4\ucee4\uc5d0\ub294 \uc774\uc804\uc5d0 \uc0ac\uc6a9\ud55c \ub370\uc774\ud130\uac00 \uc544\uc9c1 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0 \ub0a8\uc544\uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub798\uc11c restart \ub4f1\uc744 \ud1b5\ud574\uc11c \ucee8\ud14c\uc774\ub108\ub97c \uc7ac\uc2dc\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uadf8\ub7f0\ub370 \uc774 \uacfc\uc815\uc5d0\uc11c disk\ub97c \uc0ac\uc6a9\ud558\uac8c \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub798\uc11c \uc644\uc804\ud788 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \ucee8\ud14c\uc774\ub108\ub97c \uc9c0\uc6b0\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," \uba85\ub839\uc5b4\ub97c \uc0ac\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud604\uc7ac \ucee8\ud14c\uc774\ub108\ub4e4\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 3\uac1c\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"demo3")," \ucee8\ud14c\uc774\ub108\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," \uba85\ub839\uc5b4\ub97c \uce58\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac1c\ub85c \uc904\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"\ub098\uba38\uc9c0 \ucee8\ud14c\uc774\ub108\ub4e4\ub3c4 \uc0ad\uc81c\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. 
Docker rmi"),(0,r.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud558\ub294 \ucee4\ub9e8\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"\uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud604\uc7ac \uc5b4\ub5a4 \uc774\ubbf8\uc9c0\ub4e4\uc774 \ub85c\uceec\uc5d0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," \uc774\ubbf8\uc9c0\ub97c \uc0ad\uc81c\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"\ub2e4\uc2dc ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images"),"\ub97c \uce60 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7bb5633a.27b2c850.js b/assets/js/7bb5633a.fd9e2598.js similarity index 98% rename from assets/js/7bb5633a.27b2c850.js rename to assets/js/7bb5633a.fd9e2598.js index 355db2a4..5c5372fe 100644 --- a/assets/js/7bb5633a.27b2c850.js +++ b/assets/js/7bb5633a.fd9e2598.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2803],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>y});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=s(n),d=o,y=u["".concat(p,".").concat(d)]||u[d]||c[d]||a;return n?r.createElement(y,l(l({ref:t},m),{},{components:n})):r.createElement(y,l({ref:t},m))}));function y(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof 
e||o){var a=n.length,l=new Array(a);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[u]="string"==typeof e?e:o,l[1]=i;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,i={unversionedId:"api-deployment/what-is-api-deployment",id:"version-1.0/api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/docs/1.0/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/docs/1.0/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/docs/1.0/api-deployment/seldon-iris"}},p={},s=[{value:"API Deployment\ub780?",id:"api-deployment\ub780",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],m={toc:s},u="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"api-deployment\ub780"},"API Deployment\ub780?"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ud559\uc2b5\ud55c \ub4a4\uc5d0\ub294 \uc5b4\ub5bb\uac8c \uc0ac\uc6a9\ud574\uc57c \ud560\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uc744 \ud559\uc2b5\ud560 \ub54c\ub294 \ub354 \ub192\uc740 \uc131\ub2a5\uc758 \ubaa8\ub378\uc774 \ub098\uc624\uae30\ub97c \uae30\ub300\ud558\uc9c0\ub9cc, \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud558\uc5ec \ucd94\ub860\uc744 \ud560 \ub54c\ub294 \ube60\ub974\uace0 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc544\ubcf4\uace0 \uc2f6\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud558\uace0\uc790 \ud560 \ub54c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc774\ub098 \ud30c\uc774\uc36c \uc2a4\ud06c\ub9bd\ud2b8\ub97c \ud1b5\ud574 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ub85c\ub4dc\ud55c \ub4a4 \ucd94\ub860\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc \uc774\ub7f0 \ubc29\ubc95\uc740 \ubaa8\ub378\uc774 \ud074\uc218\ub85d \ubaa8\ub378\uc744 \ubd88\ub7ec\uc624\ub294 \ub370 \ub9ce\uc740 \uc2dc\uac04\uc744 \uc18c\uc694\ud558\uac8c \ub418\uc5b4\uc11c \ube44\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4. 
\ub610\ud55c \uc774\ub807\uac8c \uc774\uc6a9\ud558\uba74 \ub9ce\uc740 \uc0ac\ub78c\uc774 \ubaa8\ub378\uc744 \uc774\uc6a9\ud560 \uc218 \uc5c6\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\ub294 \ud658\uacbd\uc5d0\uc11c\ubc16\uc5d0 \uc0ac\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub798\uc11c \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc774 \uc0ac\uc6a9\ub420 \ub54c\ub294 API\ub97c \uc774\uc6a9\ud574\uc11c \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ubaa8\ub378\uc740 API \uc11c\ubc84\uac00 \uad6c\ub3d9\ub418\ub294 \ud658\uacbd\uc5d0\uc11c \ud55c \ubc88\ub9cc \ub85c\ub4dc\uac00 \ub418\uba70, DNS\ub97c \ud65c\uc6a9\ud558\uc5ec \uc678\ubd80\uc5d0\uc11c\ub3c4 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uace0 \ub2e4\ub978 \uc11c\ube44\uc2a4\uc640 \uc5f0\ub3d9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ud558\uc9c0\ub9cc \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc5d0\ub294 \uc0dd\uac01\ubcf4\ub2e4 \ub9ce\uc740 \ubd80\uc218\uc801\uc778 \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc744 \ub354 \uc27d\uac8c \ud558\uae30 \uc704\ud574\uc11c Tensorflow\uc640 \uac19\uc740 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c \uc9c4\uc601\uc5d0\uc11c\ub294 \ucd94\ub860 \uc5d4\uc9c4(Inference engine)\uc744 \uac1c\ubc1c\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc774\uc6a9\ud558\uba74 \ud574\ub2f9 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c\ub85c \uac1c\ubc1c\ub418\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ubd88\ub7ec\uc640 \ucd94\ub860\uc774 \uac00\ub2a5\ud55c API(REST \ub610\ub294 gRPC)\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\uc744 \ud65c\uc6a9\ud558\uc5ec \uad6c\ucd95\ud55c API \uc11c\ubc84\ub85c \ucd94\ub860\ud558\uace0\uc790 \ud558\ub294 \ub370\uc774\ud130\ub97c \ub2f4\uc544 \uc694\uccad\uc744 \ubcf4\ub0b4\uba74, \ucd94\ub860 \uc5d4\uc9c4\uc774 \ucd94\ub860 \uacb0\uacfc\ub97c \uc751\ub2f5\uc5d0 \ub2f4\uc544 \uc804\uc1a1\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub300\ud45c\uc801\uc73c\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow : Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch : Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"Onnx : Onnx Runtime"))),(0,o.kt)("p",null,"\uc624\ud504\uc18c\uc2a4\uc5d0\uc11c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uc9c0\ub294 \uc54a\uc9c0\ub9cc, \ub9ce\uc774 \uc4f0\uc774\ub294 sklearn, xgboost \ud504\ub808\uc784\uc6cc\ud06c\ub97c \uc704\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub3c4 \uac1c\ubc1c\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c API\uc758 \ud615\ud0dc\ub85c \ubc1b\uc544\ubcfc \uc218 \uc788\ub3c4\ub85d \ubc30\ud3ec\ud558\ub294 \uac83\uc744 ",(0,o.kt)("strong",{parentName:"p"},"API Deployment"),"\ub77c\uace0 
\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub2e4\uc591\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\ub2e4\ub294 \uc0ac\uc2e4\uc744 \uc18c\uac1c\ud574 \ub4dc\ub838\uc2b5\ub2c8\ub2e4.\n\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0\uc11c \uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud55c\ub2e4\uba74 \uc5b4\ub5a4 \uc791\uc5c5\uc774 \ud544\uc694\ud560\uae4c\uc694?\n\ucd94\ub860 \uc5d4\uc9c4\uc744 \ubc30\ud3ec\ud558\uae30 \uc704\ud55c Deployment, \ucd94\ub860 \uc694\uccad\uc744 \ubcf4\ub0bc Endpoint\ub97c \uc0dd\uc131\ud558\uae30 \uc704\ud55c Service,\n\uc678\ubd80\uc5d0\uc11c\uc758 \ucd94\ub860 \uc694\uccad\uc744 \ucd94\ub860 \uc5d4\uc9c4\uc73c\ub85c \ubcf4\ub0b4\uae30 \uc704\ud55c Ingress \ub4f1 \ub9ce\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ubc30\ud3ec\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774\uac83 \uc774\uc678\uc5d0\ub3c4, \ub9ce\uc740 \ucd94\ub860 \uc694\uccad\uc774 \ub4e4\uc5b4\uc654\uc744 \uacbd\uc6b0\uc758 \uc2a4\ucf00\uc77c \uc544\uc6c3(scale-out), \ucd94\ub860 \uc5d4\uc9c4 \uc0c1\ud0dc\uc5d0 \ub300\ud55c \ubaa8\ub2c8\ud130\ub9c1, \uac1c\uc120\ub41c \ubaa8\ub378\uc774 \ub098\uc654\uc744 \uacbd\uc6b0 \ubc84\uc804 \uc5c5\ub370\uc774\ud2b8 \ub4f1 \ucd94\ub860 \uc5d4\uc9c4\uc744 \uc6b4\uc601\ud560 \ub54c\uc758 \uc694\uad6c\uc0ac\ud56d\uc740 \ud55c\ub450 \uac00\uc9c0\uac00 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ub7ec\ud55c \ub9ce\uc740 \uc694\uad6c\uc0ac\ud56d\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud574 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd \uc704\uc5d0\uc11c \ud55c \ubc88 \ub354 \ucd94\uc0c1\ud654\ud55c ",(0,o.kt)("strong",{parentName:"p"},"Serving Framework"),"\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uac1c\ubc1c\ub41c Serving Framework\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Seldon Core\ub97c \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud558\ub294 \uacfc\uc815\uc744 \ub2e4\ub8e8\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2803],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>y});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof 
e?e(t):l(l({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),u=s(n),d=o,y=u["".concat(p,".").concat(d)]||u[d]||c[d]||a;return n?r.createElement(y,l(l({ref:t},m),{},{components:n})):r.createElement(y,l({ref:t},m))}));function y(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[u]="string"==typeof e?e:o,l[1]=i;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,i={unversionedId:"api-deployment/what-is-api-deployment",id:"version-1.0/api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/docs/1.0/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/docs/1.0/kubeflow/how-to-debug"},next:{title:"2. 
Deploy SeldonDeployment",permalink:"/docs/1.0/api-deployment/seldon-iris"}},p={},s=[{value:"API Deployment\ub780?",id:"api-deployment\ub780",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],m={toc:s},u="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"api-deployment\ub780"},"API Deployment\ub780?"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ud559\uc2b5\ud55c \ub4a4\uc5d0\ub294 \uc5b4\ub5bb\uac8c \uc0ac\uc6a9\ud574\uc57c \ud560\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uc744 \ud559\uc2b5\ud560 \ub54c\ub294 \ub354 \ub192\uc740 \uc131\ub2a5\uc758 \ubaa8\ub378\uc774 \ub098\uc624\uae30\ub97c \uae30\ub300\ud558\uc9c0\ub9cc, \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud558\uc5ec \ucd94\ub860\uc744 \ud560 \ub54c\ub294 \ube60\ub974\uace0 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc544\ubcf4\uace0 \uc2f6\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud558\uace0\uc790 \ud560 \ub54c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc774\ub098 \ud30c\uc774\uc36c \uc2a4\ud06c\ub9bd\ud2b8\ub97c \ud1b5\ud574 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ub85c\ub4dc\ud55c \ub4a4 \ucd94\ub860\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc \uc774\ub7f0 \ubc29\ubc95\uc740 \ubaa8\ub378\uc774 \ud074\uc218\ub85d \ubaa8\ub378\uc744 \ubd88\ub7ec\uc624\ub294 \ub370 \ub9ce\uc740 \uc2dc\uac04\uc744 \uc18c\uc694\ud558\uac8c \ub418\uc5b4\uc11c \ube44\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4. \ub610\ud55c \uc774\ub807\uac8c \uc774\uc6a9\ud558\uba74 \ub9ce\uc740 \uc0ac\ub78c\uc774 \ubaa8\ub378\uc744 \uc774\uc6a9\ud560 \uc218 \uc5c6\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\ub294 \ud658\uacbd\uc5d0\uc11c\ubc16\uc5d0 \uc0ac\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub798\uc11c \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc774 \uc0ac\uc6a9\ub420 \ub54c\ub294 API\ub97c \uc774\uc6a9\ud574\uc11c \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. 
\ubaa8\ub378\uc740 API \uc11c\ubc84\uac00 \uad6c\ub3d9\ub418\ub294 \ud658\uacbd\uc5d0\uc11c \ud55c \ubc88\ub9cc \ub85c\ub4dc\uac00 \ub418\uba70, DNS\ub97c \ud65c\uc6a9\ud558\uc5ec \uc678\ubd80\uc5d0\uc11c\ub3c4 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uace0 \ub2e4\ub978 \uc11c\ube44\uc2a4\uc640 \uc5f0\ub3d9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ud558\uc9c0\ub9cc \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc5d0\ub294 \uc0dd\uac01\ubcf4\ub2e4 \ub9ce\uc740 \ubd80\uc218\uc801\uc778 \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc744 \ub354 \uc27d\uac8c \ud558\uae30 \uc704\ud574\uc11c Tensorflow\uc640 \uac19\uc740 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c \uc9c4\uc601\uc5d0\uc11c\ub294 \ucd94\ub860 \uc5d4\uc9c4(Inference engine)\uc744 \uac1c\ubc1c\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc774\uc6a9\ud558\uba74 \ud574\ub2f9 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c\ub85c \uac1c\ubc1c\ub418\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ubd88\ub7ec\uc640 \ucd94\ub860\uc774 \uac00\ub2a5\ud55c API(REST \ub610\ub294 gRPC)\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\uc744 \ud65c\uc6a9\ud558\uc5ec \uad6c\ucd95\ud55c API \uc11c\ubc84\ub85c \ucd94\ub860\ud558\uace0\uc790 \ud558\ub294 \ub370\uc774\ud130\ub97c \ub2f4\uc544 \uc694\uccad\uc744 \ubcf4\ub0b4\uba74, \ucd94\ub860 \uc5d4\uc9c4\uc774 \ucd94\ub860 \uacb0\uacfc\ub97c \uc751\ub2f5\uc5d0 \ub2f4\uc544 \uc804\uc1a1\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub300\ud45c\uc801\uc73c\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow : Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch : Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"Onnx : Onnx Runtime"))),(0,o.kt)("p",null,"\uc624\ud504\uc18c\uc2a4\uc5d0\uc11c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uc9c0\ub294 \uc54a\uc9c0\ub9cc, \ub9ce\uc774 \uc4f0\uc774\ub294 sklearn, xgboost \ud504\ub808\uc784\uc6cc\ud06c\ub97c \uc704\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub3c4 \uac1c\ubc1c\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c API\uc758 \ud615\ud0dc\ub85c \ubc1b\uc544\ubcfc \uc218 \uc788\ub3c4\ub85d \ubc30\ud3ec\ud558\ub294 \uac83\uc744 ",(0,o.kt)("strong",{parentName:"p"},"API Deployment"),"\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub2e4\uc591\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\ub2e4\ub294 \uc0ac\uc2e4\uc744 \uc18c\uac1c\ud574 \ub4dc\ub838\uc2b5\ub2c8\ub2e4.\n\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0\uc11c \uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud55c\ub2e4\uba74 \uc5b4\ub5a4 \uc791\uc5c5\uc774 \ud544\uc694\ud560\uae4c\uc694?\n\ucd94\ub860 \uc5d4\uc9c4\uc744 \ubc30\ud3ec\ud558\uae30 \uc704\ud55c 
Deployment, \ucd94\ub860 \uc694\uccad\uc744 \ubcf4\ub0bc Endpoint\ub97c \uc0dd\uc131\ud558\uae30 \uc704\ud55c Service,\n\uc678\ubd80\uc5d0\uc11c\uc758 \ucd94\ub860 \uc694\uccad\uc744 \ucd94\ub860 \uc5d4\uc9c4\uc73c\ub85c \ubcf4\ub0b4\uae30 \uc704\ud55c Ingress \ub4f1 \ub9ce\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ubc30\ud3ec\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774\uac83 \uc774\uc678\uc5d0\ub3c4, \ub9ce\uc740 \ucd94\ub860 \uc694\uccad\uc774 \ub4e4\uc5b4\uc654\uc744 \uacbd\uc6b0\uc758 \uc2a4\ucf00\uc77c \uc544\uc6c3(scale-out), \ucd94\ub860 \uc5d4\uc9c4 \uc0c1\ud0dc\uc5d0 \ub300\ud55c \ubaa8\ub2c8\ud130\ub9c1, \uac1c\uc120\ub41c \ubaa8\ub378\uc774 \ub098\uc654\uc744 \uacbd\uc6b0 \ubc84\uc804 \uc5c5\ub370\uc774\ud2b8 \ub4f1 \ucd94\ub860 \uc5d4\uc9c4\uc744 \uc6b4\uc601\ud560 \ub54c\uc758 \uc694\uad6c\uc0ac\ud56d\uc740 \ud55c\ub450 \uac00\uc9c0\uac00 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ub7ec\ud55c \ub9ce\uc740 \uc694\uad6c\uc0ac\ud56d\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud574 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd \uc704\uc5d0\uc11c \ud55c \ubc88 \ub354 \ucd94\uc0c1\ud654\ud55c ",(0,o.kt)("strong",{parentName:"p"},"Serving Framework"),"\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uac1c\ubc1c\ub41c Serving Framework\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Seldon Core\ub97c \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud558\ub294 \uacfc\uc815\uc744 \ub2e4\ub8e8\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/7ef46b74.f1dbfdee.js b/assets/js/7ef46b74.6dbfdf9e.js similarity index 99% rename from assets/js/7ef46b74.f1dbfdee.js rename to assets/js/7ef46b74.6dbfdf9e.js index 771d3dd7..69ff0a6b 100644 --- a/assets/js/7ef46b74.f1dbfdee.js +++ b/assets/js/7ef46b74.6dbfdf9e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2676],{3905:(t,e,n)=>{n.d(e,{Zo:()=>d,kt:()=>f});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),u=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},d=function(t){var e=u(t.components);return a.createElement(l.Provider,{value:e},t.children)},s="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},c=a.forwardRef((function(t,e){var 
n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,d=i(t,["components","mdxType","originalType","parentName"]),s=u(n),c=r,f=s["".concat(l,".").concat(c)]||s[c]||m[c]||o;return n?a.createElement(f,p(p({ref:e},d),{},{components:n})):a.createElement(f,p({ref:e},d))}));function f(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=c;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[s]="string"==typeof t?t:r,p[1]=i;for(var u=2;u{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},p=void 0,i={unversionedId:"kubeflow/advanced-component",id:"kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/docs/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/docs/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/docs/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/docs/kubeflow/advanced-environment"}},l={},u=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule to use InputPath/OutputPath",id:"rule-to-use-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],d={toc:u},s="wrapper";function m(t){let{components:e,...n}=t;return(0,r.kt)(s,(0,a.Z)({},d,n,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," \uc608\uc2dc\ub85c \ub098\uc654\ub358 \ucf54\ub4dc\ub97c \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\uc544\ub798 \ucf54\ub4dc\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"\uc5d0\uc11c \uc0ac\uc6a9\ud588\ub358 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"component-wrapper"},"Component 
Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uacfc \ud568\uaed8 \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"../kubeflow/basic-component"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud560 \ub54c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0 \ub300\ud55c \ud0c0\uc785 \ud78c\ud2b8\ub97c \uc801\uc5b4\uc57c \ud55c\ub2e4\uace0 \uc124\uba85 \ud588\uc5c8\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d json\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8 \ud0c0\uc785\uc774 \uc544\ub2cc dataframe, model\uc640 \uac19\uc774 \ubcf5\uc7a1\ud55c \uac1d\uccb4\ub4e4\uc740 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?"),(0,r.kt)("p",null,"\ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\uac04\uc5d0 \uac12\uc744 \uc804\ub2ec\ud560 \ub54c, \uac1d\uccb4\ub97c \ubc18\ud658\ud574\ub3c4 \uadf8 \uac12\uc774 \ud638\uc2a4\ud2b8\uc758 \uba54\ubaa8\ub9ac\uc5d0 \uc800\uc7a5\ub418\uc5b4 \uc788\uc73c\ubbc0\ub85c \ub2e4\uc74c \ud568\uc218\uc5d0\uc11c\ub3c4 \uac19\uc740 \uac1d\uccb4\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc740 \uac01\uac01 \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \uc11c\ub85c \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \uac19\uc740 \uba54\ubaa8\ub9ac\ub97c \uacf5\uc720\ud558\uace0 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ubcf4\ud1b5\uc758 \ud30c\uc774\uc36c \ud568\uc218\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ubc29\uc2dd\uacfc \uac19\uc774 \uac1d\uccb4\ub97c \uc804\ub2ec\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \ub118\uaca8 \uc904 \uc218 \uc788\ub294 \uc815\ubcf4\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"json")," \uc73c\ub85c\ub9cc \uac00\ub2a5\ud569\ub2c8\ub2e4. \ub530\ub77c\uc11c Model\uc774\ub098 DataFrame\uacfc \uac19\uc774 json \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud560 \uc218 \uc5c6\ub294 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \ub2e4\ub978 \ubc29\ubc95\uc744 \ud1b5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 json-serializable \ud558\uc9c0 \uc54a\uc740 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \uba54\ubaa8\ub9ac \ub300\uc2e0 \ud30c\uc77c\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud55c \ub4a4, \uadf8 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc815\ubcf4\ub97c \uc804\ub2ec\ud569\ub2c8\ub2e4. \uc800\uc7a5\ub41c \ud30c\uc77c\uc758 \uacbd\ub85c\ub294 str\uc774\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \uc804\ub2ec\ud560 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. 
\uadf8\ub7f0\ub370 kubeflow\uc5d0\uc11c\ub294 minio\ub97c \uc774\uc6a9\ud574 \ud30c\uc77c\uc744 \uc800\uc7a5\ud558\ub294\ub370 \uc720\uc800\ub294 \uc2e4\ud589\uc744 \ud558\uae30 \uc804\uc5d0\ub294 \uac01 \ud30c\uc77c\uc758 \uacbd\ub85c\ub97c \uc54c \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \uc774\ub97c \uc704\ud574\uc11c kubeflow\uc5d0\uc11c\ub294 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \ub9e4\uc9c1\uc744 \uc81c\uacf5\ud558\ub294\ub370 \ubc14\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \uc785\ub825 \uacbd\ub85c\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \ucd9c\ub825 \uacbd\ub85c\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()"),"\ub97c argument\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ub370\uc774\ud130\ub97c \ubc1b\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()"),"\uc744 argument\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ub807\uac8c \ub9cc\ub4e0 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc11c\ub85c \uc5f0\uacb0\uc744 \ud558\uba74 kubeflow\uc5d0\uc11c \ud544\uc694\ud55c \uacbd\ub85c\ub97c \uc790\ub3d9\uc73c\ub85c \uc0dd\uc131\ud6c4 \uc785\ub825\ud574 \uc8fc\uae30 \ub54c\ubb38\uc5d0 \ub354 \uc774\uc0c1 \uc720\uc800\ub294 \uacbd\ub85c\ub97c \uc2e0\uacbd\uc4f0\uc9c0 \uc54a\uace0 \ucef4\ud3ec\ub10c\ud2b8\uac04\uc758 \uad00\uacc4\ub9cc \uc2e0\uacbd\uc4f0\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc774 \ub0b4\uc6a9\uc744 \ubc14\ud0d5\uc73c\ub85c \ub2e4\uc2dc \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"InputPath\ub098 OutputPath\ub294 string\uc744 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 string\uc740 \uc785\ub825 \ub610\ub294 \ucd9c\ub825\ud558\ub824\uace0 \ud558\ub294 \ud30c\uc77c\uc758 \ud3ec\ub9f7\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\ub2e4\uace0 \uaf2d \uc774 \ud3ec\ub9f7\uc73c\ub85c \ud30c\uc77c \ud615\ud0dc\ub85c \uc800\uc7a5\uc774 \uac15\uc81c\ub418\ub294 \uac83\uc740 \uc544\ub2d9\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \ucef4\ud30c\uc77c\ud560 \ub54c \ucd5c\uc18c\ud55c\uc758 \ud0c0\uc785 \uccb4\ud06c\ub97c \uc704\ud55c \ub3c4\uc6b0\ubbf8 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ud30c\uc77c \ud3ec\ub9f7\uc774 \uace0\uc815\ub418\uc9c0 \uc54a\ub294\ub2e4\uba74 \uc785\ub825\ud558\uc9c0 \uc54a\uc73c\uba74 \ub429\ub2c8\ub2e4 (\ud0c0\uc785 \ud78c\ud2b8 \uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"Any")," \uc640 \uac19\uc740 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4)."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"\uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"rule-to-use-inputpathoutputpath"},"Rule to use InputPath/OutputPath"),(0,r.kt)("p",null,"InputPath\ub098 OutputPath argument\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud560 \ub54c \uc9c0\ucf1c\uc57c\ud558\ub294 \uaddc\uce59\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub370\uc774\ud130\uac00 \ud544\uc694\ud558\ubbc0\ub85c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,r.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: 
str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,r.kt)("p",null,"\ud55c \uac00\uc9c0 \uc774\uc0c1\ud55c \uc810\uc744 \ud655\uc778\ud558\uc168\ub098\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\ubc14\ub85c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0\uc11c \ubc1b\ub294 argument\uc911 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \uac83\ub4e4\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]')," \uac00 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]')," \uc73c\ub85c \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 kubeflow\uc5d0\uc11c \uc815\ud55c \ubc95\uce59\uc73c\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc73c\ub85c \uc0dd\uc131\ub41c \uacbd\ub85c\ub4e4\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc811\uadfc\ud560 \ub54c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\ub97c \uc0dd\ub7b5\ud558\uc5ec \uc811\uadfc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\ub9cc \ubc29\uae08 \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uacbd\uc6b0 \uc2e4\ud589\uc774 \ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc774\uc720\ub294 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2676],{3905:(t,e,n)=>{n.d(e,{Zo:()=>d,kt:()=>f});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),u=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},d=function(t){var e=u(t.components);return a.createElement(l.Provider,{value:e},t.children)},s="mdxType",m={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},c=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,d=i(t,["components","mdxType","originalType","parentName"]),s=u(n),c=r,f=s["".concat(l,".").concat(c)]||s[c]||m[c]||o;return n?a.createElement(f,p(p({ref:e},d),{},{components:n})):a.createElement(f,p({ref:e},d))}));function f(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=c;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[s]="string"==typeof t?t:r,p[1]=i;for(var u=2;u{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"8. 
Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},p=void 0,i={unversionedId:"kubeflow/advanced-component",id:"kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/docs/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/docs/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/docs/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/docs/kubeflow/advanced-environment"}},l={},u=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule to use InputPath/OutputPath",id:"rule-to-use-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],d={toc:u},s="wrapper";function m(t){let{components:e,...n}=t;return(0,r.kt)(s,(0,a.Z)({},d,n,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," \uc608\uc2dc\ub85c \ub098\uc654\ub358 \ucf54\ub4dc\ub97c \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\uc544\ub798 \ucf54\ub4dc\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"\uc5d0\uc11c \uc0ac\uc6a9\ud588\ub358 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uacfc \ud568\uaed8 \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n 
clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"../kubeflow/basic-component"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud560 \ub54c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0 \ub300\ud55c \ud0c0\uc785 \ud78c\ud2b8\ub97c \uc801\uc5b4\uc57c \ud55c\ub2e4\uace0 \uc124\uba85 \ud588\uc5c8\uc2b5\ub2c8\ub2e4. \uadf8\ub7f0\ub370 \ub9cc\uc57d json\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8 \ud0c0\uc785\uc774 \uc544\ub2cc dataframe, model\uc640 \uac19\uc774 \ubcf5\uc7a1\ud55c \uac1d\uccb4\ub4e4\uc740 \uc5b4\ub5bb\uac8c \ud560\uae4c\uc694?"),(0,r.kt)("p",null,"\ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\uac04\uc5d0 \uac12\uc744 \uc804\ub2ec\ud560 \ub54c, \uac1d\uccb4\ub97c \ubc18\ud658\ud574\ub3c4 \uadf8 \uac12\uc774 \ud638\uc2a4\ud2b8\uc758 \uba54\ubaa8\ub9ac\uc5d0 \uc800\uc7a5\ub418\uc5b4 \uc788\uc73c\ubbc0\ub85c \ub2e4\uc74c \ud568\uc218\uc5d0\uc11c\ub3c4 \uac19\uc740 \uac1d\uccb4\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc740 \uac01\uac01 \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \uc11c\ub85c \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \uac19\uc740 \uba54\ubaa8\ub9ac\ub97c \uacf5\uc720\ud558\uace0 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0, \ubcf4\ud1b5\uc758 \ud30c\uc774\uc36c \ud568\uc218\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ubc29\uc2dd\uacfc \uac19\uc774 \uac1d\uccb4\ub97c \uc804\ub2ec\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \ub118\uaca8 \uc904 \uc218 \uc788\ub294 \uc815\ubcf4\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"json")," \uc73c\ub85c\ub9cc \uac00\ub2a5\ud569\ub2c8\ub2e4. \ub530\ub77c\uc11c Model\uc774\ub098 DataFrame\uacfc \uac19\uc774 json \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud560 \uc218 \uc5c6\ub294 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \ub2e4\ub978 \ubc29\ubc95\uc744 \ud1b5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \uc774\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574 json-serializable \ud558\uc9c0 \uc54a\uc740 \ud0c0\uc785\uc758 \uac1d\uccb4\ub294 \uba54\ubaa8\ub9ac \ub300\uc2e0 \ud30c\uc77c\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud55c \ub4a4, \uadf8 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc815\ubcf4\ub97c \uc804\ub2ec\ud569\ub2c8\ub2e4. \uc800\uc7a5\ub41c \ud30c\uc77c\uc758 \uacbd\ub85c\ub294 str\uc774\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc5d0 \uc804\ub2ec\ud560 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. \uadf8\ub7f0\ub370 kubeflow\uc5d0\uc11c\ub294 minio\ub97c \uc774\uc6a9\ud574 \ud30c\uc77c\uc744 \uc800\uc7a5\ud558\ub294\ub370 \uc720\uc800\ub294 \uc2e4\ud589\uc744 \ud558\uae30 \uc804\uc5d0\ub294 \uac01 \ud30c\uc77c\uc758 \uacbd\ub85c\ub97c \uc54c \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
\uc774\ub97c \uc704\ud574\uc11c kubeflow\uc5d0\uc11c\ub294 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \ub9e4\uc9c1\uc744 \uc81c\uacf5\ud558\ub294\ub370 \ubc14\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"InputPath"),"\ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \uc785\ub825 \uacbd\ub85c\ub97c ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \ub294 \ub2e8\uc5b4 \uadf8\ub300\ub85c \ucd9c\ub825 \uacbd\ub85c\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()"),"\ub97c argument\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ub370\uc774\ud130\ub97c \ubc1b\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()"),"\uc744 argument\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ub807\uac8c \ub9cc\ub4e0 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc11c\ub85c \uc5f0\uacb0\uc744 \ud558\uba74 kubeflow\uc5d0\uc11c \ud544\uc694\ud55c \uacbd\ub85c\ub97c \uc790\ub3d9\uc73c\ub85c \uc0dd\uc131\ud6c4 \uc785\ub825\ud574 \uc8fc\uae30 \ub54c\ubb38\uc5d0 \ub354 \uc774\uc0c1 \uc720\uc800\ub294 \uacbd\ub85c\ub97c \uc2e0\uacbd\uc4f0\uc9c0 \uc54a\uace0 \ucef4\ud3ec\ub10c\ud2b8\uac04\uc758 \uad00\uacc4\ub9cc \uc2e0\uacbd\uc4f0\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc774 \ub0b4\uc6a9\uc744 \ubc14\ud0d5\uc73c\ub85c \ub2e4\uc2dc \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"InputPath\ub098 OutputPath\ub294 string\uc744 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 string\uc740 \uc785\ub825 \ub610\ub294 \ucd9c\ub825\ud558\ub824\uace0 \ud558\ub294 \ud30c\uc77c\uc758 \ud3ec\ub9f7\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\ub2e4\uace0 \uaf2d \uc774 \ud3ec\ub9f7\uc73c\ub85c \ud30c\uc77c \ud615\ud0dc\ub85c \uc800\uc7a5\uc774 \uac15\uc81c\ub418\ub294 \uac83\uc740 \uc544\ub2d9\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \ucef4\ud30c\uc77c\ud560 \ub54c \ucd5c\uc18c\ud55c\uc758 \ud0c0\uc785 \uccb4\ud06c\ub97c \uc704\ud55c \ub3c4\uc6b0\ubbf8 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ud30c\uc77c \ud3ec\ub9f7\uc774 \uace0\uc815\ub418\uc9c0 \uc54a\ub294\ub2e4\uba74 \uc785\ub825\ud558\uc9c0 \uc54a\uc73c\uba74 \ub429\ub2c8\ub2e4 (\ud0c0\uc785 \ud78c\ud2b8 \uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"Any")," \uc640 \uac19\uc740 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4)."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"\uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("h2",{id:"rule-to-use-inputpathoutputpath"},"Rule to use InputPath/OutputPath"),(0,r.kt)("p",null,"InputPath\ub098 OutputPath argument\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud560 \ub54c \uc9c0\ucf1c\uc57c\ud558\ub294 \uaddc\uce59\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub370\uc774\ud130\uac00 \ud544\uc694\ud558\ubbc0\ub85c \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,r.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: 
str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,r.kt)("p",null,"\ud55c \uac00\uc9c0 \uc774\uc0c1\ud55c \uc810\uc744 \ud655\uc778\ud558\uc168\ub098\uc694?",(0,r.kt)("br",{parentName:"p"}),"\n","\ubc14\ub85c \uc785\ub825\uacfc \ucd9c\ub825\uc5d0\uc11c \ubc1b\ub294 argument\uc911 \uacbd\ub85c\uc640 \uad00\ub828\ub41c \uac83\ub4e4\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]')," \uac00 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]')," \uc73c\ub85c \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 kubeflow\uc5d0\uc11c \uc815\ud55c \ubc95\uce59\uc73c\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"InputPath")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"OutputPath")," \uc73c\ub85c \uc0dd\uc131\ub41c \uacbd\ub85c\ub4e4\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc811\uadfc\ud560 \ub54c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"_path")," \uc811\ubbf8\uc0ac\ub97c \uc0dd\ub7b5\ud558\uc5ec \uc811\uadfc\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\ub9cc \ubc29\uae08 \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uacbd\uc6b0 \uc2e4\ud589\uc774 \ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.\n\uc774\uc720\ub294 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8061fab4.61253720.js b/assets/js/8061fab4.45faf595.js similarity index 99% rename from assets/js/8061fab4.61253720.js rename to assets/js/8061fab4.45faf595.js index c8c35efa..e0f556b8 100644 --- a/assets/js/8061fab4.61253720.js +++ b/assets/js/8061fab4.45faf595.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1044],{3905:(e,a,t)=>{t.d(a,{Zo:()=>p,kt:()=>b});var r=t(7294);function n(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function o(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);a&&(r=r.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var a=1;a=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var l=r.createContext({}),c=function(e){var a=r.useContext(l),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},p=function(e){var a=c(e.components);return r.createElement(l.Provider,{value:a},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return r.createElement(r.Fragment,{},a)}},k=r.forwardRef((function(e,a){var t=e.components,n=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(t),k=n,b=u["".concat(l,".").concat(k)]||u[k]||d[k]||o;return t?r.createElement(b,i(i({ref:a},p),{},{components:t})):r.createElement(b,i({ref:a},p))}));function b(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var o=t.length,i=new Array(o);i[0]=k;var s={};for(var l in 
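The advanced-component page bundled above stops right after defining `complex_pipeline` and notes that uploading it as-is will not run, for a reason covered on the next page. For reference, here is a minimal sketch of how such a pipeline function is typically compiled into an uploadable file with the kfp v1 SDK; the module name in the import and the output file name are hypothetical.

```python
# Minimal sketch: compile the pipeline function into a YAML package that can be
# uploaded to Kubeflow Pipelines. Assumes the kfp v1 SDK; the module name and
# output file name below are hypothetical examples.
import kfp

# Hypothetical module holding the complex_pipeline definition shown above.
from complex_pipeline_module import complex_pipeline

if __name__ == "__main__":
    kfp.compiler.Compiler().compile(complex_pipeline, "complex_pipeline.yaml")
```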
a)hasOwnProperty.call(a,l)&&(s[l]=a[l]);s.originalType=e,s[u]="string"==typeof e?e:n,i[1]=s;for(var c=2;c{t.r(a),t.d(a,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var r=t(7462),n=(t(7294),t(3905));const o={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,s={unversionedId:"setup-components/install-components-kf",id:"version-1.0/setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/versioned_docs/version-1.0/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/docs/1.0/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-kf.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. MLflow Tracking Server",permalink:"/docs/1.0/setup-components/install-components-mlflow"}},l={},c=[{value:"\uc124\uce58 \ud30c\uc77c \uc900\ube44",id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44",level:2},{value:"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58",id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"OIDC AuthService",id:"oidc-authservice",level:3},{value:"Kubeflow Namespace",id:"kubeflow-namespace",level:3},{value:"Kubeflow Roles",id:"kubeflow-roles",level:3},{value:"Kubeflow Istio Resources",id:"kubeflow-istio-resources",level:3},{value:"Kubeflow Pipelines",id:"kubeflow-pipelines",level:3},{value:"Katib",id:"katib",level:3},{value:"Central Dashboard",id:"central-dashboard",level:3},{value:"Admission Webhook",id:"admission-webhook",level:3},{value:"Notebooks & Jupyter Web App",id:"notebooks--jupyter-web-app",level:3},{value:"Profiles + KFAM",id:"profiles--kfam",level:3},{value:"Volumes Web App",id:"volumes-web-app",level:3},{value:"Tensorboard & Tensorboard Web App",id:"tensorboard--tensorboard-web-app",level:3},{value:"Training Operator",id:"training-operator",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:c},u="wrapper";function d(e){let{components:a,...o}=e;return(0,n.kt)(u,(0,r.Z)({},p,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44"},"\uc124\uce58 \ud30c\uc77c \uc900\ube44"),(0,n.kt)("p",null,"Kubeflow ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ubc84\uc804\uc744 \uc124\uce58\ud558\uae30 \uc704\ud574\uc11c, \uc124\uce58\uc5d0 \ud544\uc694\ud55c manifests \ud30c\uc77c\ub4e4\uc744 
\uc900\ube44\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," \ub97c ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ud0dc\uadf8\ub85c \uae43 \ud074\ub860\ud55c \ub4a4, \ud574\ub2f9 \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,n.kt)("h2",{id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58"},"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58"),(0,n.kt)("p",null,"kubeflow/manifests Repository \uc5d0 \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58 \ucee4\ub9e8\ub4dc\uac00 \uc801\ud600\uc838 \uc788\uc9c0\ub9cc, \uc124\uce58\ud558\uba70 \ubc1c\uc0dd\ud560 \uc218 \uc788\ub294 \uc774\uc288 \ud639\uc740 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc774 \uc801\ud600\uc838 \uc788\uc9c0 \uc54a\uc544 \ucc98\uc74c \uc124\uce58\ud558\ub294 \uacbd\uc6b0 \uc5b4\ub824\uc6c0\uc744 \uacaa\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c, \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4\ub85c \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc744 \ud568\uaed8 \uc791\uc131\ud569\ub2c8\ub2e4. "),(0,n.kt)("p",null,"\ub610\ud55c, \ubcf8 \ubb38\uc11c\uc5d0\uc11c\ub294 ",(0,n.kt)("strong",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8e8\uc9c0 \uc54a\ub294 \uad6c\uc131\uc694\uc18c\uc778 Knative, KFServing, MPI Operator \uc758 \uc124\uce58\ub294 \ub9ac\uc18c\uc2a4\uc758 \ud6a8\uc728\uc801 \uc0ac\uc6a9\uc744 \uc704\ud574 \ub530\ub85c \uc124\uce58\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"cert-manager \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io 
created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,n.kt)("p",{parentName:"li"},"cert-manager namespace \uc758 3 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"kubeflow-issuer \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer 
created\n")))),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook \uc774\uc288"),(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook deployment \uac00 Running \uc774 \uc544\ub2cc \uacbd\uc6b0, \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud558\uba70 kubeflow-issuer\uac00 \uc124\uce58\ub418\uc9c0 \uc54a\uc744 \uc218 \uc788\uc74c\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c \uacbd\uc6b0, cert-manager \uc758 3\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub418\ub294 \uac83\uc744 \ud655\uc778\ud55c \uc774\ud6c4 \ub2e4\uc2dc \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,n.kt)("h3",{id:"istio"},"Istio"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \uad00\ub828 Custom Resource Definition(CRD) \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio namespace \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,n.kt)("p",{parentName:"li"},"istio-system namespace \uc758 2 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,n.kt)("h3",{id:"dex"},"Dex"),(0,n.kt)("p",null,"dex \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c 
\uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,n.kt)("p",null,"auth namespace \uc758 1 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,n.kt)("h3",{id:"oidc-authservice"},"OIDC AuthService"),(0,n.kt)("p",null,"OIDC AuthService \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,n.kt)("p",null,"istio-system namespace \uc5d0 authservice-0 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,n.kt)("h3",{id:"kubeflow-namespace"},"Kubeflow Namespace"),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,n.kt)("h3",{id:"kubeflow-roles"},"Kubeflow Roles"),(0,n.kt)("p",null,"kubeflow-roles \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 6\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,n.kt)("h3",{id:"kubeflow-istio-resources"},"Kubeflow Istio Resources"),(0,n.kt)("p",null,"kubeflow-istio-resources \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 3\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,n.kt)("p",null,"Kubeflow namespace \uc5d0 gateway \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c 
\uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,n.kt)("h3",{id:"kubeflow-pipelines"},"Kubeflow Pipelines"),(0,n.kt)("p",null,"kubeflow pipelines \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,n.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub294 \uc5ec\ub7ec resources \ub97c \ud55c \ubc88\uc5d0 \uc124\uce58\ud558\uace0 \uc788\uc9c0\ub9cc, \uc124\uce58 \uc21c\uc11c\uc758 \uc758\uc874\uc131\uc774 \uc788\ub294 \ub9ac\uc18c\uc2a4\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub54c\uc5d0 \ub530\ub77c \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,n.kt)("p",null,"\uc704\uc640 \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c\ub2e4\uba74, 10 \ucd08 \uc815\ub3c4 \uae30\ub2e4\ub9b0 \ub4a4 \ub2e4\uc2dc \uc704\uc758 \uba85\ub839\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 16\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 
5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"pipeline-ui",src:t(8730).Z,width:"2868",height:"970"})),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"localhost \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288")),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"localhost-reject",src:t(5688).Z,width:"626",height:"406"})),(0,n.kt)("p",null,"\ub9cc\uc57d \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"localhost\uc5d0\uc11c \uc5f0\uacb0\uc744 \uac70\ubd80\ud588\uc2b5\ub2c8\ub2e4")," \ub77c\ub294 \uc5d0\ub7ec\uac00 \ucd9c\ub825\ub420 \uacbd\uc6b0, \ucee4\ub9e8\ub4dc\ub85c address \uc124\uc815\uc744 \ud1b5\ud574 \uc811\uadfc\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"\ubcf4\uc548\uc0c1\uc758 \ubb38\uc81c\uac00 \ub418\uc9c0 \uc54a\ub294\ub2e4\uba74,")," \uc544\ub798\uc640 \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"0.0.0.0")," \ub85c \ubaa8\ub4e0 \uc8fc\uc18c\uc758 bind\ub97c \uc5f4\uc5b4\uc8fc\ub294 \ubc29\ud5a5\uc73c\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"\uc704\uc758 \uc635\uc158\uc73c\ub85c \uc2e4\ud589\ud588\uc74c\uc5d0\ub3c4 \uc5ec\uc804\ud788 \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288\uac00 \ubc1c\uc0dd\ud560 \uacbd\uc6b0")),(0,n.kt)("p",null,"\ubc29\ud654\ubcbd \uc124\uc815\uc73c\ub85c \uc811\uc18d\ud574 \ubaa8\ub4e0 tcp \ud504\ub85c\ud1a0\ucf5c\uc758 \ud3ec\ud2b8\uc5d0 \ub300\ud55c \uc811\uc18d\uc744 \ud5c8\uac00 \ub610\ub294 8888\ubc88 \ud3ec\ud2b8\uc758 \uc811\uc18d \ud5c8\uac00\ub97c \ucd94\uac00\ud574 \uc811\uadfc \uad8c\ud55c\uc744 \ud5c8\uac00\ud574\uc90d\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("inlineCode",{parentName:"p"},"http://<\ub2f9\uc2e0\uc758 \uac00\uc0c1 \uc778\uc2a4\ud134\uc2a4 \uacf5\uc778 ip \uc8fc\uc18c>:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud558\uba74, ml-pipeline UI \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ud558\ub2e8\uc5d0\uc11c \uc9c4\ud589\ub418\ub294 \ub2e4\ub978 \ud3ec\ud2b8\uc758 \uacbd\ub85c\uc5d0 \uc811\uc18d\ud560 \ub54c\ub3c4 \uc704\uc758 \uc808\ucc28\uc640 \ub3d9\uc77c\ud558\uac8c \ucee4\ub9e8\ub4dc\ub97c \uc2e4\ud589\ud558\uace0, \ubc29\ud654\ubcbd\uc5d0 \ud3ec\ud2b8 \ubc88\ud638\ub97c \ucd94\uac00\ud574\uc8fc\uba74 \uc2e4\ud589\ud558\ub294 \uac83\uc774 
\uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"katib"},"Katib"),(0,n.kt)("p",null,"Katib \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c katib UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 8081:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"katib-ui",src:t(5606).Z,width:"2146",height:"620"})),(0,n.kt)("h3",{id:"central-dashboard"},"Central 
Dashboard"),(0,n.kt)("p",null,"Dashboard \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,n.kt)("p",null,"kubeflow namespace \uc5d0 centraldashboard \uad00\ub828 1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c Central Dashboard UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(9536).Z,width:"4982",height:"1548"})),(0,n.kt)("h3",{id:"admission-webhook"},"Admission Webhook"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert 
created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,n.kt)("h3",{id:"notebooks--jupyter-web-app"},"Notebooks & Jupyter Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Notebook controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created\nserviceaccount/notebook-controller-service-account created\nrole.rbac.authorization.k8s.io/notebook-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-role created\nrolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created\nconfigmap/notebook-controller-config-m44cmb547t created\nservice/notebook-controller-service created\ndeployment.apps/notebook-controller-deployment created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep notebook-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"notebook-controller-deployment-75b4f7b578-w4d4l 1/1 Running 0 105s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Jupyter Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/jupyter-web-app-service-account 
created\nrole.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created\nrolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created\nclusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep jupyter-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1\uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"jupyter-web-app-deployment-6f744fbc54-p27ts 1/1 Running 0 2m\n")))),(0,n.kt)("h3",{id:"profiles--kfam"},"Profiles + KFAM"),(0,n.kt)("p",null,"Profile Controller\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,n.kt)("h3",{id:"volumes-web-app"},"Volumes Web App"),(0,n.kt)("p",null,"Volumes Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 
\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,n.kt)("p",null,"1\uac1c\uc758 pod\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,n.kt)("h3",{id:"tensorboard--tensorboard-web-app"},"Tensorboard & Tensorboard Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Web App \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/tensorboards-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created\nconfigmap/tensorboards-web-app-parameters-g28fbd6cch created\nservice/tensorboards-web-app-service created\ndeployment.apps/tensorboards-web-app-deployment created\nvirtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboards-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboards-web-app-deployment-6ff79b7f44-qbzmw 1/1 Running 0 22s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Controller \ub97c 
\uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created\nserviceaccount/tensorboard-controller created\nrole.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created\nrolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created\nconfigmap/tensorboard-controller-config-bf88mm96c8 created\nservice/tensorboard-controller-controller-manager-metrics-service created\ndeployment.apps/tensorboard-controller-controller-manager created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboard-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboard-controller-controller-manager-954b7c544-vjpzj 3/3 Running 1 73s\n")))),(0,n.kt)("h3",{id:"training-operator"},"Training Operator"),(0,n.kt)("p",null,"Training Operator \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep 
training-operator\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,n.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,n.kt)("p",null,"Kubeflow \uc0ac\uc6a9\uc744 \uc704\ud574, \uc0ac\uc6a9\ud560 User\uc758 Kubeflow Profile \uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,n.kt)("p",null,"kubeflow-user-example-com profile \uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,n.kt)("h2",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,n.kt)("p",null,"Kubeflow central dashboard\uc5d0 web browser\ub85c \uc811\uc18d\ud558\uae30 \uc704\ud574 \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,n.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"login-ui",src:t(1417).Z,width:"2554",height:"1202"})),(0,n.kt)("p",null,"\ub2e4\uc74c \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Email Address: ",(0,n.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,n.kt)("li",{parentName:"ul"},"Password: ",(0,n.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(3733).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},3733:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},9536:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/central-dashboard-ddf80e24ff9066a7e3fdbfd0d58b5721.png"},5606:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/katib-ui-f10efe0ffd3bb57b1de7bdc2ff2aa880.png"},5688:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/localhost-reject-8d0b59ff30048e97d5721f786f25c857.png"},1417:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"},8730:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/pipeline-ui-796868a1ebeabfd6d1b6eb9b54c389aa.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1044],{3905:(e,a,t)=>{t.d(a,{Zo:()=>p,kt:()=>b});var r=t(7294);function n(e,a,t){return a in 
e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function o(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);a&&(r=r.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var a=1;a=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var l=r.createContext({}),c=function(e){var a=r.useContext(l),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},p=function(e){var a=c(e.components);return r.createElement(l.Provider,{value:a},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return r.createElement(r.Fragment,{},a)}},k=r.forwardRef((function(e,a){var t=e.components,n=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(t),k=n,b=u["".concat(l,".").concat(k)]||u[k]||d[k]||o;return t?r.createElement(b,i(i({ref:a},p),{},{components:t})):r.createElement(b,i({ref:a},p))}));function b(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var o=t.length,i=new Array(o);i[0]=k;var s={};for(var l in a)hasOwnProperty.call(a,l)&&(s[l]=a[l]);s.originalType=e,s[u]="string"==typeof e?e:n,i[1]=s;for(var c=2;c{t.r(a),t.d(a,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var r=t(7462),n=(t(7294),t(3905));const o={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,s={unversionedId:"setup-components/install-components-kf",id:"version-1.0/setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/versioned_docs/version-1.0/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/docs/1.0/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-kf.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. 
MLflow Tracking Server",permalink:"/docs/1.0/setup-components/install-components-mlflow"}},l={},c=[{value:"\uc124\uce58 \ud30c\uc77c \uc900\ube44",id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44",level:2},{value:"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58",id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"OIDC AuthService",id:"oidc-authservice",level:3},{value:"Kubeflow Namespace",id:"kubeflow-namespace",level:3},{value:"Kubeflow Roles",id:"kubeflow-roles",level:3},{value:"Kubeflow Istio Resources",id:"kubeflow-istio-resources",level:3},{value:"Kubeflow Pipelines",id:"kubeflow-pipelines",level:3},{value:"Katib",id:"katib",level:3},{value:"Central Dashboard",id:"central-dashboard",level:3},{value:"Admission Webhook",id:"admission-webhook",level:3},{value:"Notebooks & Jupyter Web App",id:"notebooks--jupyter-web-app",level:3},{value:"Profiles + KFAM",id:"profiles--kfam",level:3},{value:"Volumes Web App",id:"volumes-web-app",level:3},{value:"Tensorboard & Tensorboard Web App",id:"tensorboard--tensorboard-web-app",level:3},{value:"Training Operator",id:"training-operator",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:c},u="wrapper";function d(e){let{components:a,...o}=e;return(0,n.kt)(u,(0,r.Z)({},p,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44"},"\uc124\uce58 \ud30c\uc77c \uc900\ube44"),(0,n.kt)("p",null,"Kubeflow ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ubc84\uc804\uc744 \uc124\uce58\ud558\uae30 \uc704\ud574\uc11c, \uc124\uce58\uc5d0 \ud544\uc694\ud55c manifests \ud30c\uc77c\ub4e4\uc744 \uc900\ube44\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," \ub97c ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ud0dc\uadf8\ub85c \uae43 \ud074\ub860\ud55c \ub4a4, \ud574\ub2f9 \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,n.kt)("h2",{id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58"},"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58"),(0,n.kt)("p",null,"kubeflow/manifests Repository \uc5d0 \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58 \ucee4\ub9e8\ub4dc\uac00 \uc801\ud600\uc838 \uc788\uc9c0\ub9cc, \uc124\uce58\ud558\uba70 \ubc1c\uc0dd\ud560 \uc218 \uc788\ub294 \uc774\uc288 \ud639\uc740 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc774 \uc801\ud600\uc838 \uc788\uc9c0 \uc54a\uc544 \ucc98\uc74c \uc124\uce58\ud558\ub294 \uacbd\uc6b0 \uc5b4\ub824\uc6c0\uc744 \uacaa\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c, \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4\ub85c \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc744 \ud568\uaed8 \uc791\uc131\ud569\ub2c8\ub2e4. 
"),(0,n.kt)("p",null,"\ub610\ud55c, \ubcf8 \ubb38\uc11c\uc5d0\uc11c\ub294 ",(0,n.kt)("strong",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8e8\uc9c0 \uc54a\ub294 \uad6c\uc131\uc694\uc18c\uc778 Knative, KFServing, MPI Operator \uc758 \uc124\uce58\ub294 \ub9ac\uc18c\uc2a4\uc758 \ud6a8\uc728\uc801 \uc0ac\uc6a9\uc744 \uc704\ud574 \ub530\ub85c \uc124\uce58\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"cert-manager \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers 
created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,n.kt)("p",{parentName:"li"},"cert-manager namespace \uc758 3 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"kubeflow-issuer \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created\n")))),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook \uc774\uc288"),(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook deployment \uac00 Running \uc774 \uc544\ub2cc \uacbd\uc6b0, \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud558\uba70 kubeflow-issuer\uac00 \uc124\uce58\ub418\uc9c0 \uc54a\uc744 \uc218 \uc788\uc74c\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c \uacbd\uc6b0, cert-manager \uc758 3\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub418\ub294 \uac83\uc744 \ud655\uc778\ud55c \uc774\ud6c4 \ub2e4\uc2dc \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection 
refused\n')))),(0,n.kt)("h3",{id:"istio"},"Istio"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \uad00\ub828 Custom Resource Definition(CRD) \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio namespace \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system 
created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,n.kt)("p",{parentName:"li"},"istio-system namespace \uc758 2 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,n.kt)("h3",{id:"dex"},"Dex"),(0,n.kt)("p",null,"dex \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,n.kt)("p",null,"auth namespace \uc758 1 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,n.kt)("h3",{id:"oidc-authservice"},"OIDC AuthService"),(0,n.kt)("p",null,"OIDC AuthService \ub97c 
\uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,n.kt)("p",null,"istio-system namespace \uc5d0 authservice-0 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,n.kt)("h3",{id:"kubeflow-namespace"},"Kubeflow Namespace"),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,n.kt)("h3",{id:"kubeflow-roles"},"Kubeflow Roles"),(0,n.kt)("p",null,"kubeflow-roles \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc 
\uac19\uc774 \ucd1d 6\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,n.kt)("h3",{id:"kubeflow-istio-resources"},"Kubeflow Istio Resources"),(0,n.kt)("p",null,"kubeflow-istio-resources \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 3\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,n.kt)("p",null,"Kubeflow namespace \uc5d0 gateway \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,n.kt)("h3",{id:"kubeflow-pipelines"},"Kubeflow Pipelines"),(0,n.kt)("p",null,"kubeflow pipelines \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,n.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub294 \uc5ec\ub7ec resources \ub97c \ud55c \ubc88\uc5d0 \uc124\uce58\ud558\uace0 \uc788\uc9c0\ub9cc, 
\uc124\uce58 \uc21c\uc11c\uc758 \uc758\uc874\uc131\uc774 \uc788\ub294 \ub9ac\uc18c\uc2a4\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub54c\uc5d0 \ub530\ub77c \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,n.kt)("p",null,"\uc704\uc640 \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c\ub2e4\uba74, 10 \ucd08 \uc815\ub3c4 \uae30\ub2e4\ub9b0 \ub4a4 \ub2e4\uc2dc \uc704\uc758 \uba85\ub839\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 16\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"pipeline-ui",src:t(8730).Z,width:"2868",height:"970"})),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"localhost \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288")),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"localhost-reject",src:t(5688).Z,width:"626",height:"406"})),(0,n.kt)("p",null,"\ub9cc\uc57d \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"localhost\uc5d0\uc11c \uc5f0\uacb0\uc744 \uac70\ubd80\ud588\uc2b5\ub2c8\ub2e4")," \ub77c\ub294 
\uc5d0\ub7ec\uac00 \ucd9c\ub825\ub420 \uacbd\uc6b0, \ucee4\ub9e8\ub4dc\ub85c address \uc124\uc815\uc744 \ud1b5\ud574 \uc811\uadfc\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"\ubcf4\uc548\uc0c1\uc758 \ubb38\uc81c\uac00 \ub418\uc9c0 \uc54a\ub294\ub2e4\uba74,")," \uc544\ub798\uc640 \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"0.0.0.0")," \ub85c \ubaa8\ub4e0 \uc8fc\uc18c\uc758 bind\ub97c \uc5f4\uc5b4\uc8fc\ub294 \ubc29\ud5a5\uc73c\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"\uc704\uc758 \uc635\uc158\uc73c\ub85c \uc2e4\ud589\ud588\uc74c\uc5d0\ub3c4 \uc5ec\uc804\ud788 \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288\uac00 \ubc1c\uc0dd\ud560 \uacbd\uc6b0")),(0,n.kt)("p",null,"\ubc29\ud654\ubcbd \uc124\uc815\uc73c\ub85c \uc811\uc18d\ud574 \ubaa8\ub4e0 tcp \ud504\ub85c\ud1a0\ucf5c\uc758 \ud3ec\ud2b8\uc5d0 \ub300\ud55c \uc811\uc18d\uc744 \ud5c8\uac00 \ub610\ub294 8888\ubc88 \ud3ec\ud2b8\uc758 \uc811\uc18d \ud5c8\uac00\ub97c \ucd94\uac00\ud574 \uc811\uadfc \uad8c\ud55c\uc744 \ud5c8\uac00\ud574\uc90d\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("inlineCode",{parentName:"p"},"http://<\ub2f9\uc2e0\uc758 \uac00\uc0c1 \uc778\uc2a4\ud134\uc2a4 \uacf5\uc778 ip \uc8fc\uc18c>:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud558\uba74, ml-pipeline UI \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ud558\ub2e8\uc5d0\uc11c \uc9c4\ud589\ub418\ub294 \ub2e4\ub978 \ud3ec\ud2b8\uc758 \uacbd\ub85c\uc5d0 \uc811\uc18d\ud560 \ub54c\ub3c4 \uc704\uc758 \uc808\ucc28\uc640 \ub3d9\uc77c\ud558\uac8c \ucee4\ub9e8\ub4dc\ub97c \uc2e4\ud589\ud558\uace0, \ubc29\ud654\ubcbd\uc5d0 \ud3ec\ud2b8 \ubc88\ud638\ub97c \ucd94\uac00\ud574\uc8fc\uba74 \uc2e4\ud589\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"katib"},"Katib"),(0,n.kt)("p",null,"Katib \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates 
created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c katib UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 8081:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"katib-ui",src:t(5606).Z,width:"2146",height:"620"})),(0,n.kt)("h3",{id:"central-dashboard"},"Central Dashboard"),(0,n.kt)("p",null,"Dashboard \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,n.kt)("p",null,"kubeflow namespace \uc5d0 centraldashboard \uad00\ub828 1 \uac1c\uc758 pod 
\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c Central Dashboard UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(9536).Z,width:"4982",height:"1548"})),(0,n.kt)("h3",{id:"admission-webhook"},"Admission Webhook"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,n.kt)("h3",{id:"notebooks--jupyter-web-app"},"Notebooks & Jupyter Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Notebook controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created\nserviceaccount/notebook-controller-service-account created\nrole.rbac.authorization.k8s.io/notebook-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-role created\nrolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created\nconfigmap/notebook-controller-config-m44cmb547t created\nservice/notebook-controller-service created\ndeployment.apps/notebook-controller-deployment created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep notebook-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"notebook-controller-deployment-75b4f7b578-w4d4l 1/1 Running 0 105s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Jupyter Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/jupyter-web-app-service-account created\nrole.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created\nrolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created\nclusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep jupyter-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1\uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 
\uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"jupyter-web-app-deployment-6f744fbc54-p27ts 1/1 Running 0 2m\n")))),(0,n.kt)("h3",{id:"profiles--kfam"},"Profiles + KFAM"),(0,n.kt)("p",null,"Profile Controller\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,n.kt)("h3",{id:"volumes-web-app"},"Volumes Web App"),(0,n.kt)("p",null,"Volumes Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,n.kt)("p",null,"1\uac1c\uc758 pod\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 
\uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,n.kt)("h3",{id:"tensorboard--tensorboard-web-app"},"Tensorboard & Tensorboard Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Web App \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/tensorboards-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created\nconfigmap/tensorboards-web-app-parameters-g28fbd6cch created\nservice/tensorboards-web-app-service created\ndeployment.apps/tensorboards-web-app-deployment created\nvirtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboards-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboards-web-app-deployment-6ff79b7f44-qbzmw 1/1 Running 0 22s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created\nserviceaccount/tensorboard-controller created\nrole.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created\nrolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created\nconfigmap/tensorboard-controller-config-bf88mm96c8 
created\nservice/tensorboard-controller-controller-manager-metrics-service created\ndeployment.apps/tensorboard-controller-controller-manager created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboard-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboard-controller-controller-manager-954b7c544-vjpzj 3/3 Running 1 73s\n")))),(0,n.kt)("h3",{id:"training-operator"},"Training Operator"),(0,n.kt)("p",null,"Training Operator \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,n.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,n.kt)("p",null,"Kubeflow \uc0ac\uc6a9\uc744 \uc704\ud574, \uc0ac\uc6a9\ud560 User\uc758 Kubeflow Profile \uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,n.kt)("p",null,"kubeflow-user-example-com profile \uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get 
profile\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,n.kt)("h2",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,n.kt)("p",null,"Kubeflow central dashboard\uc5d0 web browser\ub85c \uc811\uc18d\ud558\uae30 \uc704\ud574 \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,n.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"login-ui",src:t(1417).Z,width:"2554",height:"1202"})),(0,n.kt)("p",null,"\ub2e4\uc74c \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Email Address: ",(0,n.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,n.kt)("li",{parentName:"ul"},"Password: ",(0,n.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(3733).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},3733:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},9536:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/central-dashboard-ddf80e24ff9066a7e3fdbfd0d58b5721.png"},5606:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/katib-ui-f10efe0ffd3bb57b1de7bdc2ff2aa880.png"},5688:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/localhost-reject-8d0b59ff30048e97d5721f786f25c857.png"},1417:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"},8730:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/pipeline-ui-796868a1ebeabfd6d1b6eb9b54c389aa.png"}}]); \ No newline at end of file diff --git a/assets/js/838277dc.cec0c92e.js b/assets/js/838277dc.544ec3e2.js similarity index 99% rename from assets/js/838277dc.cec0c92e.js rename to assets/js/838277dc.544ec3e2.js index fe13dee9..b68bf49e 100644 --- a/assets/js/838277dc.cec0c92e.js +++ b/assets/js/838277dc.544ec3e2.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9683],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=o.createContext({}),c=function(e){var t=o.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var 
n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),u=c(n),f=r,m=u["".concat(p,".").concat(f)]||u[f]||d[f]||l;return n?o.createElement(m,i(i({ref:t},s),{},{components:n})):o.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,i=new Array(l);i[0]=f;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:r,i[1]=a;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>l,metadata:()=>a,toc:()=>c});var o=n(7462),r=(n(7294),n(3905));const l={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},i=void 0,a={unversionedId:"kubeflow/kubeflow-concepts",id:"version-1.0/kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/docs/1.0/kubeflow/kubeflow-intro"},next:{title:"3. Install Requirements",permalink:"/docs/1.0/kubeflow/basic-requirements"}},p={},c=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:c},u="wrapper";function d(e){let{components:t,...l}=e;return(0,r.kt)(u,(0,o.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component contents)\uc640 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component wrapper)\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ud1b5\ud574 kubeflow\uc5d0 \uc804\ub2ec\ub418\uba70 \uc804\ub2ec\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589(execute)\ud558\uace0 \uc544\ud2f0\ud329\ud2b8(artifacts)\ub4e4\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-0.png",src:n(5235).Z,width:"1392",height:"704"})),(0,r.kt)("h3",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uad6c\uc131\ud558\ub294 \uac83\uc740 \ucd1d 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-1.png",src:n(8694).Z,width:"574",height:"436"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Environemnt"),(0,r.kt)("li",{parentName:"ol"},"Python code w\\ Config"),(0,r.kt)("li",{parentName:"ol"},"Generates 
Artifacts")),(0,r.kt)("p",null,"\uc608\uc2dc\uc640 \ud568\uaed8 \uac01 \uad6c\uc131 \uc694\uc18c\uac00 \uc5b4\ub5a4 \uac83\uc778\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub2e4\uc74c\uacfc \uac19\uc774 \ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc640 SVC(Support Vector Classifier)\ub97c \ud559\uc2b5\ud55c \ud6c4 SVC \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc744 \uc801\uc740 \ud30c\uc774\uc36c \ucf54\ub4dc\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"\uc704\uc758 \ud30c\uc774\uc36c \ucf54\ub4dc\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub85c \ub098\ub20c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-2.png",src:n(2029).Z,width:"832",height:"410"})),(0,r.kt)("p",null,"Environment\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\ub4e4\uc744 import\ud558\ub294 \ubd80\ubd84\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uc73c\ub85c Python Code w\\ Config \uc5d0\uc11c\ub294 \uc8fc\uc5b4\uc9c4 Config\ub97c \uc774\uc6a9\ud574 \uc2e4\uc81c\ub85c \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9c8\uc9c0\ub9c9\uc73c\ub85c \uc544\ud2f0\ud329\ud2b8\ub97c \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0 \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud558\uace0 \uc2e4\ud589\uc2dc\ud0a4\ub294 \uc791\uc5c5\uc744 \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-3.png",src:n(2335).Z,width:"1066",height:"766"})),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv"),"\uc640 \uac19\uc774 \ud568\uc218\uc758 \ud615\ud0dc\ub85c \uc815\uc758\ud569\ub2c8\ub2e4.\n\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \ucf58\ud150\uce20\ub97c \uac10\uc2f8\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-4.png",src:n(6425).Z,width:"464",height:"826"})),(0,r.kt)("h3",{id:"artifacts"},"Artifacts"),(0,r.kt)("p",null,"\uc704\uc758 \uc124\uba85\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc544\ud2f0\ud329\ud2b8(Artifacts)\ub97c \uc0dd\uc131\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. 
\uc544\ud2f0\ud329\ud2b8\ub780 evaluation result, log \ub4f1 \uc5b4\ub5a4 \ud615\ud0dc\ub85c\ub4e0 \ud30c\uc77c\ub85c \uc0dd\uc131\ub418\ub294 \uac83\uc744 \ud1b5\ud2c0\uc5b4\uc11c \uce6d\ud558\ub294 \uc6a9\uc5b4\uc785\ub2c8\ub2e4.\n\uadf8\uc911 \uc6b0\ub9ac\uac00 \uad00\uc2ec\uc744 \ub450\ub294 \uc720\uc758\ubbf8\ud55c \uac83\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-5.png",src:n(4454).Z,width:"1700",height:"454"})),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Model"),(0,r.kt)("li",{parentName:"ul"},"Data"),(0,r.kt)("li",{parentName:"ul"},"Metric"),(0,r.kt)("li",{parentName:"ul"},"etc")),(0,r.kt)("h4",{id:"model"},"Model"),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \ubaa8\ub378\uc744 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc758 \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"\ubaa8\ub378\uc774\ub780 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ud559\uc2b5\ub41c Weights\uc640 Network \uad6c\uc870 \uadf8\ub9ac\uace0 \uc774\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud55c \ud658\uacbd\uc774 \ubaa8\ub450 \ud3ec\ud568\ub41c \ud615\ud0dc")),(0,r.kt)("h4",{id:"data"},"Data"),(0,r.kt)("p",null,"\ub370\uc774\ud130\ub294 \uc804 \ucc98\ub9ac\ub41c \ud53c\ucc98, \ubaa8\ub378\uc758 \uc608\uce21 \uac12 \ub4f1\uc744 \ud3ec\ud568\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"metric"},"Metric"),(0,r.kt)("p",null,"Metric\uc740 \ub3d9\uc801 \uc9c0\ud45c\uc640 \uc815\uc801 \uc9c0\ud45c \ub450 \uac00\uc9c0\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub3d9\uc801 \uc9c0\ud45c\ub780 train loss\uc640 \uac19\uc774 \ud559\uc2b5\uc774 \uc9c4\ud589\ub418\ub294 \uc911 \uc5d0\ud3ed(Epoch)\ub9c8\ub2e4 \uacc4\uc18d\ud574\uc11c \ubcc0\ud654\ud558\ub294 \uac12\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\uc815\uc801 \uc9c0\ud45c\ub780 \ud559\uc2b5\uc774 \ub05d\ub09c \ud6c4 \ucd5c\uc885\uc801\uc73c\ub85c \ubaa8\ub378\uc744 \ud3c9\uac00\ud558\ub294 \uc815\ud655\ub3c4 \ub4f1\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\uc2dc\ud0a4\ub294 \uc21c\uc11c\ub3c4\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uc21c\uc11c\ub3c4\ub294 \ubc29\ud5a5 \uc21c\ud658\uc774 \uc5c6\ub294 \uadf8\ub798\ud504\ub85c \uc774\ub8e8\uc5b4\uc838 \uc788\uc73c\uba70, \uac04\ub2e8\ud55c \uc870\uac74\ubb38\uc744 \ud3ec\ud568\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-6.png",src:n(8050).Z,width:"1696",height:"746"})),(0,r.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,r.kt)("p",null,"\uc55e\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574\uc11c\ub294 Config\uac00 \ud544\uc694\ud558\ub2e4\uace0 \uc124\uba85\ud588\uc2b5\ub2c8\ub2e4. 
\ud30c\uc774\ud504\ub77c\uc778\uc744 \uad6c\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 Config \ub4e4\uc744 \ubaa8\uc544 \ub454 \uac83\uc774 \ud30c\uc774\ud504\ub77c\uc778 Config\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-7.png",src:n(1333).Z,width:"1810",height:"432"})),(0,r.kt)("h2",{id:"run"},"Run"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \ud544\uc694\ub85c \ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\uac00 \uc8fc\uc5b4\uc838\uc57c\uc9c0\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc744 Run \uc774\ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-8.png",src:n(5460).Z,width:"1810",height:"576"})),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub418\uba74 \uac01 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\nKubeflow pipeline\uc5d0\uc11c\ub294 Run \ud558\ub098\ub2f9 \uace0\uc720\ud55c ID \ub97c \uc0dd\uc131\ud558\uace0, Run\uc5d0\uc11c \uc0dd\uc131\ub418\ub294 \ubaa8\ub4e0 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-9.png",src:n(9069).Z,width:"1810",height:"592"})),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \uc774\uc81c \uc9c1\uc811 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},5235:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},8694:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},2029:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},2335:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},6425:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},4454:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},8050:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},1333:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},5460:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},9069:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9683],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=o.createContext({}),c=function(e){var t=o.useContext(p),n=t;return 
e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),u=c(n),f=r,m=u["".concat(p,".").concat(f)]||u[f]||d[f]||l;return n?o.createElement(m,i(i({ref:t},s),{},{components:n})):o.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var l=n.length,i=new Array(l);i[0]=f;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:r,i[1]=a;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>l,metadata:()=>a,toc:()=>c});var o=n(7462),r=(n(7294),n(3905));const l={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},i=void 0,a={unversionedId:"kubeflow/kubeflow-concepts",id:"version-1.0/kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/docs/1.0/kubeflow/kubeflow-intro"},next:{title:"3. 
Install Requirements",permalink:"/docs/1.0/kubeflow/basic-requirements"}},p={},c=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:c},u="wrapper";function d(e){let{components:t,...l}=e;return(0,r.kt)(u,(0,o.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component contents)\uc640 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component wrapper)\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ud1b5\ud574 kubeflow\uc5d0 \uc804\ub2ec\ub418\uba70 \uc804\ub2ec\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589(execute)\ud558\uace0 \uc544\ud2f0\ud329\ud2b8(artifacts)\ub4e4\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-0.png",src:n(5235).Z,width:"1392",height:"704"})),(0,r.kt)("h3",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uad6c\uc131\ud558\ub294 \uac83\uc740 \ucd1d 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-1.png",src:n(8694).Z,width:"574",height:"436"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Environemnt"),(0,r.kt)("li",{parentName:"ol"},"Python code w\\ Config"),(0,r.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,r.kt)("p",null,"\uc608\uc2dc\uc640 \ud568\uaed8 \uac01 \uad6c\uc131 \uc694\uc18c\uac00 \uc5b4\ub5a4 \uac83\uc778\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub2e4\uc74c\uacfc \uac19\uc774 \ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc640 SVC(Support Vector Classifier)\ub97c \ud559\uc2b5\ud55c \ud6c4 SVC \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc744 \uc801\uc740 \ud30c\uc774\uc36c \ucf54\ub4dc\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,r.kt)("p",null,"\uc704\uc758 \ud30c\uc774\uc36c \ucf54\ub4dc\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub85c \ub098\ub20c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-2.png",src:n(2029).Z,width:"832",height:"410"})),(0,r.kt)("p",null,"Environment\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\ub4e4\uc744 import\ud558\ub294 \ubd80\ubd84\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uc73c\ub85c Python Code w\\ Config \uc5d0\uc11c\ub294 \uc8fc\uc5b4\uc9c4 Config\ub97c \uc774\uc6a9\ud574 \uc2e4\uc81c\ub85c \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub9c8\uc9c0\ub9c9\uc73c\ub85c 
\uc544\ud2f0\ud329\ud2b8\ub97c \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0 \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud558\uace0 \uc2e4\ud589\uc2dc\ud0a4\ub294 \uc791\uc5c5\uc744 \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-3.png",src:n(2335).Z,width:"1066",height:"766"})),(0,r.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv"),"\uc640 \uac19\uc774 \ud568\uc218\uc758 \ud615\ud0dc\ub85c \uc815\uc758\ud569\ub2c8\ub2e4.\n\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \ucf58\ud150\uce20\ub97c \uac10\uc2f8\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-4.png",src:n(6425).Z,width:"464",height:"826"})),(0,r.kt)("h3",{id:"artifacts"},"Artifacts"),(0,r.kt)("p",null,"\uc704\uc758 \uc124\uba85\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc544\ud2f0\ud329\ud2b8(Artifacts)\ub97c \uc0dd\uc131\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. \uc544\ud2f0\ud329\ud2b8\ub780 evaluation result, log \ub4f1 \uc5b4\ub5a4 \ud615\ud0dc\ub85c\ub4e0 \ud30c\uc77c\ub85c \uc0dd\uc131\ub418\ub294 \uac83\uc744 \ud1b5\ud2c0\uc5b4\uc11c \uce6d\ud558\ub294 \uc6a9\uc5b4\uc785\ub2c8\ub2e4.\n\uadf8\uc911 \uc6b0\ub9ac\uac00 \uad00\uc2ec\uc744 \ub450\ub294 \uc720\uc758\ubbf8\ud55c \uac83\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-5.png",src:n(4454).Z,width:"1700",height:"454"})),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Model"),(0,r.kt)("li",{parentName:"ul"},"Data"),(0,r.kt)("li",{parentName:"ul"},"Metric"),(0,r.kt)("li",{parentName:"ul"},"etc")),(0,r.kt)("h4",{id:"model"},"Model"),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \ubaa8\ub378\uc744 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc758 \ud588\uc2b5\ub2c8\ub2e4."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"\ubaa8\ub378\uc774\ub780 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ud559\uc2b5\ub41c Weights\uc640 Network \uad6c\uc870 \uadf8\ub9ac\uace0 \uc774\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud55c \ud658\uacbd\uc774 \ubaa8\ub450 \ud3ec\ud568\ub41c \ud615\ud0dc")),(0,r.kt)("h4",{id:"data"},"Data"),(0,r.kt)("p",null,"\ub370\uc774\ud130\ub294 \uc804 \ucc98\ub9ac\ub41c \ud53c\ucc98, \ubaa8\ub378\uc758 \uc608\uce21 \uac12 \ub4f1\uc744 \ud3ec\ud568\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"metric"},"Metric"),(0,r.kt)("p",null,"Metric\uc740 \ub3d9\uc801 \uc9c0\ud45c\uc640 \uc815\uc801 \uc9c0\ud45c \ub450 \uac00\uc9c0\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"\ub3d9\uc801 \uc9c0\ud45c\ub780 train loss\uc640 \uac19\uc774 \ud559\uc2b5\uc774 \uc9c4\ud589\ub418\ub294 \uc911 \uc5d0\ud3ed(Epoch)\ub9c8\ub2e4 \uacc4\uc18d\ud574\uc11c \ubcc0\ud654\ud558\ub294 \uac12\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"\uc815\uc801 \uc9c0\ud45c\ub780 \ud559\uc2b5\uc774 \ub05d\ub09c \ud6c4 \ucd5c\uc885\uc801\uc73c\ub85c \ubaa8\ub378\uc744 \ud3c9\uac00\ud558\ub294 \uc815\ud655\ub3c4 \ub4f1\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\ub97c 
\uc2e4\ud589\uc2dc\ud0a4\ub294 \uc21c\uc11c\ub3c4\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uc21c\uc11c\ub3c4\ub294 \ubc29\ud5a5 \uc21c\ud658\uc774 \uc5c6\ub294 \uadf8\ub798\ud504\ub85c \uc774\ub8e8\uc5b4\uc838 \uc788\uc73c\uba70, \uac04\ub2e8\ud55c \uc870\uac74\ubb38\uc744 \ud3ec\ud568\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-6.png",src:n(8050).Z,width:"1696",height:"746"})),(0,r.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,r.kt)("p",null,"\uc55e\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574\uc11c\ub294 Config\uac00 \ud544\uc694\ud558\ub2e4\uace0 \uc124\uba85\ud588\uc2b5\ub2c8\ub2e4. \ud30c\uc774\ud504\ub77c\uc778\uc744 \uad6c\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 Config \ub4e4\uc744 \ubaa8\uc544 \ub454 \uac83\uc774 \ud30c\uc774\ud504\ub77c\uc778 Config\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-7.png",src:n(1333).Z,width:"1810",height:"432"})),(0,r.kt)("h2",{id:"run"},"Run"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \ud544\uc694\ub85c \ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\uac00 \uc8fc\uc5b4\uc838\uc57c\uc9c0\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc744 Run \uc774\ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-8.png",src:n(5460).Z,width:"1810",height:"576"})),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub418\uba74 \uac01 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\nKubeflow pipeline\uc5d0\uc11c\ub294 Run \ud558\ub098\ub2f9 \uace0\uc720\ud55c ID \ub97c \uc0dd\uc131\ud558\uace0, Run\uc5d0\uc11c \uc0dd\uc131\ub418\ub294 \ubaa8\ub4e0 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"concept-9.png",src:n(9069).Z,width:"1810",height:"592"})),(0,r.kt)("p",null,"\uadf8\ub7ec\uba74 \uc774\uc81c \uc9c1\uc811 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},5235:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},8694:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},2029:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},2335:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},6425:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},4454:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},8050:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},1333:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},5460:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},9069:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file diff --git a/assets/js/84c20269.4edb664d.js 
b/assets/js/84c20269.daeb0eed.js similarity index 98% rename from assets/js/84c20269.4edb664d.js rename to assets/js/84c20269.daeb0eed.js index f33badcf..4a04426e 100644 --- a/assets/js/84c20269.4edb664d.js +++ b/assets/js/84c20269.daeb0eed.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1017],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>m});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),s=p(r),k=o,m=s["".concat(c,".").concat(k)]||s[k]||d[k]||a;return r?n.createElement(m,i(i({ref:t},u),{},{components:r})):n.createElement(m,i({ref:t},u))}));function m(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=k;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[s]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/docker",id:"prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/docs/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/docs/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/docker.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/docs/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/docs/prerequisites/docker/command"}},c={},p=[{value:"\ucee8\ud14c\uc774\ub108",id:"\ucee8\ud14c\uc774\ub108",level:2},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:2},{value:"Layer \ud574\uc11d",id:"layer-\ud574\uc11d",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:p},s="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(s,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"\ucee8\ud14c\uc774\ub108"},"\ucee8\ud14c\uc774\ub108"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 
\uac00\uc0c1\ud654",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud558\uac8c \uc2e4\ud589\ud558\ub294 \uae30\uc220"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \ud30c\uc77c\ub4e4\uc758 \uc9d1\ud569"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub780?",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uae30\ubc18\uc73c\ub85c \uc2e4\ud589\ub41c \ud55c \uac1c\uc758 \ud504\ub85c\uc138\uc2a4"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0\ub85c \ucc0d\uc5b4\ub0b8 \ubd95\uc5b4\ube75")))),(0,o.kt)("h2",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,o.kt)("p",null,"\ub3c4\ucee4\ub294 ",(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108\ub97c \uad00\ub9ac"),"\ud558\uace0 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574\uc8fc\ub294 \ud50c\ub7ab\ud3fc\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ub3c4\ucee4\uc758 \uc2ac\ub85c\uac74\uc740 \ubc14\ub85c ",(0,o.kt)("strong",{parentName:"p"},"Build Once, Run Anywhere")," \ub85c \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud55c \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub3c4\ucee4 \ub0b4\ubd80\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \uacfc\uc815\uc744 \ubcf4\uc790\uba74 \uc2e4\uc81c\ub85c container \ub97c \uc704\ud55c \ub9ac\uc18c\uc2a4\ub97c \ubd84\ub9ac\ud558\uace0, lifecycle \uc744 \uc81c\uc5b4\ud558\ub294 \uae30\ub2a5\uc740 linux kernel \uc758 cgroup \ub4f1\uc774 \uc218\ud589\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc774\ub7ec\ud55c \uc778\ud130\ud398\uc774\uc2a4\ub97c \ubc14\ub85c \uc0ac\uc6a9\ud558\ub294 \uac83\uc740 ",(0,o.kt)("strong",{parentName:"p"},"\ub108\ubb34 \uc5b4\ub835\uae30 \ub54c\ubb38\uc5d0")," \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\uc0c1\ud654 layer\ub97c \ub9cc\ub4e4\uac8c \ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(866).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"\uc774\ub97c \ud1b5\ud574 \uc0ac\uc6a9\uc790\ub294 \uc0ac\uc6a9\uc790 \uce5c\ud654\uc801\uc778 API \uc778 ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI")," \ub9cc\uc73c\ub85c \uc27d\uac8c \ucee8\ud14c\uc774\ub108\ub97c \uc81c\uc5b4\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"layer-\ud574\uc11d"},"Layer \ud574\uc11d"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub098\uc628 layer\ub4e4\uc758 \uc5ed\ud560\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: linux kernel \uc758 \uae30\ub2a5\uc744 \uc9c1\uc811 \uc0ac\uc6a9\ud574\uc11c, container \ub77c\ub294 \ud558\ub098\uc758 \ud504\ub85c\uc138\uc2a4\uac00 \uc0ac\uc6a9\ud560 \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc640 cpu, memory, filesystem \ub4f1\uc744 \uaca9\ub9ac\uc2dc\ucf1c\uc8fc\ub294 \uae30\ub2a5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"containerd: runC(OCI layer) \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30 \uc704\ud55c \ucd94\uc0c1\ud654 \ub2e8\uacc4\uc774\uba70, \ud45c\uc900\ud654\ub41c \uc778\ud130\ud398\uc774\uc2a4(OCI)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"dockerd: 
containerd \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\ub294 \uc5ed\ud560\ub9cc \ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"docker cli: \uc0ac\uc6a9\uc790\ub294 docker cli \ub85c dockerd (Docker daemon)\uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30\ub9cc \ud558\uba74 \ub429\ub2c8\ub2e4.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc774 \ud1b5\uc2e0 \uacfc\uc815\uc5d0\uc11c unix socket \uc744 \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \uac00\ub054 \ub3c4\ucee4 \uad00\ub828 \uc5d0\ub7ec\uac00 \ub098\uba74 ",(0,o.kt)("inlineCode",{parentName:"li"},"/var/run/docker.sock")," \uac00 \uc0ac\uc6a9 \uc911\uc774\ub2e4, \uad8c\ud55c\uc774 \uc5c6\ub2e4 \ub4f1\ub4f1\uc758 \uc5d0\ub7ec \uba54\uc2dc\uc9c0\uac00 \ub098\uc624\ub294 \uac83\uc785\ub2c8\ub2e4.")))),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ub3c4\ucee4\ub294 \ub9ce\uc740 \ub2e8\uacc4\ub97c \uac10\uc2f8\uace0 \uc788\uc9c0\ub9cc, \ud754\ud788 \ub3c4\ucee4\ub77c\ub294 \uc6a9\uc5b4\ub97c \uc0ac\uc6a9\ud560 \ub54c\ub294 Docker CLI \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0, Dockerd \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0 Docker Container \ud558\ub098\ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uc5b4\uc11c \ud63c\ub780\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \ub098\uc624\ub294 \uae00\uc5d0\uc11c\ub3c4 \ub3c4\ucee4\uac00 \uc5ec\ub7ec\uac00\uc9c0 \uc758\ubbf8\ub85c \uc4f0\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ub3c4\ucee4\ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\uc720\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ub098\uc758 ML \ud559\uc2b5/\ucd94\ub860 \ucf54\ub4dc\ub97c OS, python version, python \ud658\uacbd, \ud2b9\uc815 python package \ubc84\uc804\uc5d0 independent \ud558\ub3c4\ub85d \ud574\uc57c \ud55c\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uadf8\ub798\uc11c \ucf54\ub4dc \ubfd0\ub9cc\uc774 \uc544\ub2cc ",(0,o.kt)("strong",{parentName:"li"},"\ud574\ub2f9 \ucf54\ub4dc\uac00 \uc2e4\ud589\ub418\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \uc885\uc18d\uc801\uc778 \ud328\ud0a4\uc9c0, \ud658\uacbd \ubcc0\uc218, \ud3f4\ub354\uba85 \ub4f1\ub4f1\uc744 \ud558\ub098\uc758 \ud328\ud0a4\uc9c0\ub85c")," \ubb36\uc744 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ud654 \uae30\uc220\uc774\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uc774 \uae30\uc220\uc744 \uc27d\uac8c \uc0ac\uc6a9\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc911 \ud558\ub098\uac00 \ub3c4\ucee4\uc774\uba70, \ud328\ud0a4\uc9c0\ub97c \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub978\ub2e4.")))}d.isMDXComponent=!0},866:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1017],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>m});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),s=p(r),k=o,m=s["".concat(c,".").concat(k)]||s[k]||d[k]||a;return r?n.createElement(m,i(i({ref:t},u),{},{components:r})):n.createElement(m,i({ref:t},u))}));function m(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=k;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[s]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/docker",id:"prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/docs/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/docs/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/docker.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/docs/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/docs/prerequisites/docker/command"}},c={},p=[{value:"\ucee8\ud14c\uc774\ub108",id:"\ucee8\ud14c\uc774\ub108",level:2},{value:"\ub3c4\ucee4",id:"\ub3c4\ucee4",level:2},{value:"Layer \ud574\uc11d",id:"layer-\ud574\uc11d",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:p},s="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(s,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"\ucee8\ud14c\uc774\ub108"},"\ucee8\ud14c\uc774\ub108"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud558\uac8c \uc2e4\ud589\ud558\ub294 \uae30\uc220"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc5b4\ud50c\ub9ac\ucf00\uc774\uc158\uc744 \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \ud30c\uc77c\ub4e4\uc758 \uc9d1\ud569"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0"))),(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108\ub780?",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uae30\ubc18\uc73c\ub85c \uc2e4\ud589\ub41c \ud55c 
\uac1c\uc758 \ud504\ub85c\uc138\uc2a4"),(0,o.kt)("li",{parentName:"ul"},"\u2192 \ubd95\uc5b4\ube75 \ud2c0\ub85c \ucc0d\uc5b4\ub0b8 \ubd95\uc5b4\ube75")))),(0,o.kt)("h2",{id:"\ub3c4\ucee4"},"\ub3c4\ucee4"),(0,o.kt)("p",null,"\ub3c4\ucee4\ub294 ",(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108\ub97c \uad00\ub9ac"),"\ud558\uace0 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574\uc8fc\ub294 \ud50c\ub7ab\ud3fc\uc785\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ub3c4\ucee4\uc758 \uc2ac\ub85c\uac74\uc740 \ubc14\ub85c ",(0,o.kt)("strong",{parentName:"p"},"Build Once, Run Anywhere")," \ub85c \uc5b4\ub514\uc5d0\uc11c\ub098 \ub3d9\uc77c\ud55c \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub3c4\ucee4 \ub0b4\ubd80\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \uacfc\uc815\uc744 \ubcf4\uc790\uba74 \uc2e4\uc81c\ub85c container \ub97c \uc704\ud55c \ub9ac\uc18c\uc2a4\ub97c \ubd84\ub9ac\ud558\uace0, lifecycle \uc744 \uc81c\uc5b4\ud558\ub294 \uae30\ub2a5\uc740 linux kernel \uc758 cgroup \ub4f1\uc774 \uc218\ud589\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc774\ub7ec\ud55c \uc778\ud130\ud398\uc774\uc2a4\ub97c \ubc14\ub85c \uc0ac\uc6a9\ud558\ub294 \uac83\uc740 ",(0,o.kt)("strong",{parentName:"p"},"\ub108\ubb34 \uc5b4\ub835\uae30 \ub54c\ubb38\uc5d0")," \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\uc0c1\ud654 layer\ub97c \ub9cc\ub4e4\uac8c \ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(866).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"\uc774\ub97c \ud1b5\ud574 \uc0ac\uc6a9\uc790\ub294 \uc0ac\uc6a9\uc790 \uce5c\ud654\uc801\uc778 API \uc778 ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI")," \ub9cc\uc73c\ub85c \uc27d\uac8c \ucee8\ud14c\uc774\ub108\ub97c \uc81c\uc5b4\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"layer-\ud574\uc11d"},"Layer \ud574\uc11d"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub098\uc628 layer\ub4e4\uc758 \uc5ed\ud560\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: linux kernel \uc758 \uae30\ub2a5\uc744 \uc9c1\uc811 \uc0ac\uc6a9\ud574\uc11c, container \ub77c\ub294 \ud558\ub098\uc758 \ud504\ub85c\uc138\uc2a4\uac00 \uc0ac\uc6a9\ud560 \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc640 cpu, memory, filesystem \ub4f1\uc744 \uaca9\ub9ac\uc2dc\ucf1c\uc8fc\ub294 \uae30\ub2a5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"containerd: runC(OCI layer) \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30 \uc704\ud55c \ucd94\uc0c1\ud654 \ub2e8\uacc4\uc774\uba70, \ud45c\uc900\ud654\ub41c \uc778\ud130\ud398\uc774\uc2a4(OCI)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"dockerd: containerd \uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\ub294 \uc5ed\ud560\ub9cc \ud569\ub2c8\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"docker cli: \uc0ac\uc6a9\uc790\ub294 docker cli \ub85c dockerd (Docker daemon)\uc5d0\uac8c \uba85\ub839\uc744 \ub0b4\ub9ac\uae30\ub9cc \ud558\uba74 \ub429\ub2c8\ub2e4.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"\uc774 \ud1b5\uc2e0 \uacfc\uc815\uc5d0\uc11c unix socket \uc744 \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \uac00\ub054 \ub3c4\ucee4 \uad00\ub828 \uc5d0\ub7ec\uac00 \ub098\uba74 ",(0,o.kt)("inlineCode",{parentName:"li"},"/var/run/docker.sock")," \uac00 \uc0ac\uc6a9 \uc911\uc774\ub2e4, \uad8c\ud55c\uc774 \uc5c6\ub2e4 \ub4f1\ub4f1\uc758 \uc5d0\ub7ec \uba54\uc2dc\uc9c0\uac00 \ub098\uc624\ub294 \uac83\uc785\ub2c8\ub2e4.")))),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc 
\ub3c4\ucee4\ub294 \ub9ce\uc740 \ub2e8\uacc4\ub97c \uac10\uc2f8\uace0 \uc788\uc9c0\ub9cc, \ud754\ud788 \ub3c4\ucee4\ub77c\ub294 \uc6a9\uc5b4\ub97c \uc0ac\uc6a9\ud560 \ub54c\ub294 Docker CLI \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0, Dockerd \ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uace0 Docker Container \ud558\ub098\ub97c \ub9d0\ud560 \ub54c\ub3c4 \uc788\uc5b4\uc11c \ud63c\ub780\uc774 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc55e\uc73c\ub85c \ub098\uc624\ub294 \uae00\uc5d0\uc11c\ub3c4 \ub3c4\ucee4\uac00 \uc5ec\ub7ec\uac00\uc9c0 \uc758\ubbf8\ub85c \uc4f0\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\uac00 \ub3c4\ucee4\ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\uc720\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ub098\uc758 ML \ud559\uc2b5/\ucd94\ub860 \ucf54\ub4dc\ub97c OS, python version, python \ud658\uacbd, \ud2b9\uc815 python package \ubc84\uc804\uc5d0 independent \ud558\ub3c4\ub85d \ud574\uc57c \ud55c\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uadf8\ub798\uc11c \ucf54\ub4dc \ubfd0\ub9cc\uc774 \uc544\ub2cc ",(0,o.kt)("strong",{parentName:"li"},"\ud574\ub2f9 \ucf54\ub4dc\uac00 \uc2e4\ud589\ub418\uae30 \uc704\ud574 \ud544\uc694\ud55c \ubaa8\ub4e0 \uc885\uc18d\uc801\uc778 \ud328\ud0a4\uc9c0, \ud658\uacbd \ubcc0\uc218, \ud3f4\ub354\uba85 \ub4f1\ub4f1\uc744 \ud558\ub098\uc758 \ud328\ud0a4\uc9c0\ub85c")," \ubb36\uc744 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ud654 \uae30\uc220\uc774\ub2e4."),(0,o.kt)("li",{parentName:"ol"},"\uc774 \uae30\uc220\uc744 \uc27d\uac8c \uc0ac\uc6a9\ud558\uace0 \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \uc911 \ud558\ub098\uac00 \ub3c4\ucee4\uc774\uba70, \ud328\ud0a4\uc9c0\ub97c \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub978\ub2e4.")))}d.isMDXComponent=!0},866:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file diff --git a/assets/js/88b38b2b.0eb7b3c5.js b/assets/js/88b38b2b.4207e29d.js similarity index 99% rename from assets/js/88b38b2b.0eb7b3c5.js rename to assets/js/88b38b2b.4207e29d.js index 18ceca21..21e9a00f 100644 --- a/assets/js/88b38b2b.0eb7b3c5.js +++ b/assets/js/88b38b2b.4207e29d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4299],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>f});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function p(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var l=r.createContext({}),u=function(e){var n=r.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):p(p({},n),e)),t},m=function(e){var n=u(e.components);return r.createElement(l.Provider,{value:n},e.children)},s="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var 
t=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),s=u(t),d=o,f=s["".concat(l,".").concat(d)]||s[d]||c[d]||a;return t?r.createElement(f,p(p({ref:n},m),{},{components:t})):r.createElement(f,p({ref:n},m))}));function f(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,p=new Array(a);p[0]=d;var i={};for(var l in n)hasOwnProperty.call(n,l)&&(i[l]=n[l]);i.originalType=e,i[s]="string"==typeof e?e:o,p[1]=i;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var r=t(7462),o=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/basic-component",id:"kubeflow/basic-component",title:"4. Component - Write",description:"",source:"@site/docs/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/docs/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/docs/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/docs/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],m={toc:u},s="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(s,(0,r.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"component"},"Component"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub97c \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component Contents) \uc791\uc131"),(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component Wrapper) \uc791\uc131")),(0,o.kt)("p",null,"\uc774\uc81c \uac01 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub294 \uc6b0\ub9ac\uac00 \ud754\ud788 \uc791\uc131\ud558\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ub2e4\ub974\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc22b\uc790\ub97c \uc785\ub825\uc73c\ub85c \ubc1b\uace0 \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud55c \ub4a4 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 
\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 \uc5d0\ub7ec\uac00 \ub098\uace0 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294\ub370 \uadf8 \uc774\uc720\ub294 \ucd9c\ub825\ud574\uc57c \ud560 ",(0,o.kt)("inlineCode",{parentName:"p"},"number"),"\uac00 \uc815\uc758\ub418\uc5b4 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),"\uc5d0\uc11c ",(0,o.kt)("inlineCode",{parentName:"p"},"number")," \uc640 \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c \uac12\ub4e4\uc740 ",(0,o.kt)("strong",{parentName:"p"},"Config"),"\ub85c \uc815\uc758\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c Config\ub4e4\uc740 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0\uc11c \uc804\ub2ec\uc774 \ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"\uc774\uc81c \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud560 \uc218 \uc788\ub3c4\ub85d \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubcc4\ub3c4\uc758 Config \uc5c6\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub85c \uac10\uc300 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,o.kt)("p",null,"\uc774\uc81c \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c Config\ub97c \ub798\ud37c\uc758 argument\ub85c \ucd94\uac00\ud569\ub2c8\ub2e4. \ub2e4\ub9cc, argument \ub9cc\uc744 \uc801\ub294 \uac83\uc774 \uc544\ub2c8\ub77c argument\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub3c4 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4. Kubeflow\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 Kubeflow \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud560 \ub54c, \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc758 \uc5f0\uacb0\uc5d0\uc11c \uc815\ud574\uc9c4 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \ud0c0\uc785\uc774 \uc77c\uce58\ud558\ub294\uc9c0 \uccb4\ud06c\ud569\ub2c8\ub2e4. 
\ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \ud544\uc694\ub85c \ud558\ub294 \uc785\ub825\uacfc \ub2e4\ub978 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc804\ub2ec\ubc1b\uc740 \ucd9c\ub825\uc758 \ud3ec\ub9f7\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0 \ud30c\uc774\ud504\ub77c\uc778 \uc0dd\uc131\uc744 \ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc81c \ub2e4\uc74c\uacfc \uac19\uc774 argument\uc640 \uadf8 \ud0c0\uc785, \uadf8\ub9ac\uace0 \ubc18\ud658\ud558\ub294 \ud0c0\uc785\uc744 \uc801\uc5b4\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc644\uc131\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ubc18\ud658 \uac12\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\uc740 json\uc5d0\uc11c \ud45c\ud604\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\ub4e4\ub9cc \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ub300\ud45c\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\uba70 \uad8c\uc7a5\ud558\ub294 \ud0c0\uc785\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"int"),(0,o.kt)("li",{parentName:"ul"},"float"),(0,o.kt)("li",{parentName:"ul"},"str")),(0,o.kt)("p",null,"\ub9cc\uc57d \ub2e8\uc77c \uac12\uc774 \uc544\ub2cc \uc5ec\ub7ec \uac12\uc744 \ubc18\ud658\ud558\ub824\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"collections.namedtuple")," \uc744 \uc774\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ucc38\uace0 \ud558\uc2dc\uae38 \ubc14\ub78d\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c 2\ub85c \ub098\ub208 \ubaab\uacfc \ub098\uba38\uc9c0\ub97c \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud574\uc57c \ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func")," \ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub807\uac8c \ubcc0\ud658\ub41c \ud615\ud0dc\ub294 \ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\ub85c import \ud558\uc5ec\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,o.kt)("p",null,"\ub9cc\uc57d \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uacf5\uc720\ub97c \ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0 YAML \ud30c\uc77c\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uacf5\uc720\ud574\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \uc704\ud574\uc11c\ub294 \uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud55c \ub4a4 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file")," \uc744 \ud1b5\ud574 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6b0\uc120 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,o.kt)("p",null,"\uc791\uc131\ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
\ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,o.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ud30c\uc77c\uc744 \uacf5\uc720\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,o.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull "),": \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \ud658\uacbd \uc815\ubcf4\uac00 \ub2f4\uae34 \uc774\ubbf8\uc9c0\ub97c pull"),(0,o.kt)("li",{parentName:"ol"},"run ",(0,o.kt)("inlineCode",{parentName:"li"},"command"),": pull \ud55c \uc774\ubbf8\uc9c0\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4. ")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ub97c \uc608\uc2dc\ub85c \ub4e4\uc790\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," \uc758 default image \ub294 python:3.7 \uc774\ubbc0\ub85c \ud574\ub2f9 \uc774\ubbf8\uc9c0\ub97c \uae30\uc900\uc73c\ub85c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. 
"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,o.kt)("h2",{id:"references"},"References:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4299],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>f});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function p(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var l=r.createContext({}),u=function(e){var n=r.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):p(p({},n),e)),t},m=function(e){var n=u(e.components);return r.createElement(l.Provider,{value:n},e.children)},s="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),s=u(t),d=o,f=s["".concat(l,".").concat(d)]||s[d]||c[d]||a;return t?r.createElement(f,p(p({ref:n},m),{},{components:t})):r.createElement(f,p({ref:n},m))}));function f(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var a=t.length,p=new Array(a);p[0]=d;var i={};for(var l in n)hasOwnProperty.call(n,l)&&(i[l]=n[l]);i.originalType=e,i[s]="string"==typeof e?e:o,p[1]=i;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var r=t(7462),o=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/basic-component",id:"kubeflow/basic-component",title:"4. Component - Write",description:"",source:"@site/docs/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/docs/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/docs/kubeflow/basic-requirements"},next:{title:"5. 
Pipeline - Write",permalink:"/docs/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],m={toc:u},s="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(s,(0,r.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"component"},"Component"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub97c \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component Contents) \uc791\uc131"),(0,o.kt)("li",{parentName:"ol"},"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component Wrapper) \uc791\uc131")),(0,o.kt)("p",null,"\uc774\uc81c \uac01 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub294 \uc6b0\ub9ac\uac00 \ud754\ud788 \uc791\uc131\ud558\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ub2e4\ub974\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc22b\uc790\ub97c \uc785\ub825\uc73c\ub85c \ubc1b\uace0 \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud55c \ub4a4 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc774 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 \uc5d0\ub7ec\uac00 \ub098\uace0 \ub3d9\uc791\ud558\uc9c0 \uc54a\ub294\ub370 \uadf8 \uc774\uc720\ub294 \ucd9c\ub825\ud574\uc57c \ud560 ",(0,o.kt)("inlineCode",{parentName:"p"},"number"),"\uac00 \uc815\uc758\ub418\uc5b4 \uc788\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),"\uc5d0\uc11c ",(0,o.kt)("inlineCode",{parentName:"p"},"number")," \uc640 \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c \uac12\ub4e4\uc740 ",(0,o.kt)("strong",{parentName:"p"},"Config"),"\ub85c \uc815\uc758\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. 
\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574 \ud544\uc694\ud55c Config\ub4e4\uc740 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uc5d0\uc11c \uc804\ub2ec\uc774 \ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"\uc774\uc81c \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud560 \uc218 \uc788\ub3c4\ub85d \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubcc4\ub3c4\uc758 Config \uc5c6\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub85c \uac10\uc300 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,o.kt)("p",null,"\uc774\uc81c \ucf58\ud150\uce20\uc5d0\uc11c \ud544\uc694\ud55c Config\ub97c \ub798\ud37c\uc758 argument\ub85c \ucd94\uac00\ud569\ub2c8\ub2e4. \ub2e4\ub9cc, argument \ub9cc\uc744 \uc801\ub294 \uac83\uc774 \uc544\ub2c8\ub77c argument\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub3c4 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4. Kubeflow\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 Kubeflow \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud560 \ub54c, \ucef4\ud3ec\ub10c\ud2b8 \uac04\uc758 \uc5f0\uacb0\uc5d0\uc11c \uc815\ud574\uc9c4 \uc785\ub825\uacfc \ucd9c\ub825\uc758 \ud0c0\uc785\uc774 \uc77c\uce58\ud558\ub294\uc9c0 \uccb4\ud06c\ud569\ub2c8\ub2e4. \ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \ud544\uc694\ub85c \ud558\ub294 \uc785\ub825\uacfc \ub2e4\ub978 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc804\ub2ec\ubc1b\uc740 \ucd9c\ub825\uc758 \ud3ec\ub9f7\uc774 \uc77c\uce58\ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0 \ud30c\uc774\ud504\ub77c\uc778 \uc0dd\uc131\uc744 \ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc81c \ub2e4\uc74c\uacfc \uac19\uc774 argument\uc640 \uadf8 \ud0c0\uc785, \uadf8\ub9ac\uace0 \ubc18\ud658\ud558\ub294 \ud0c0\uc785\uc744 \uc801\uc5b4\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc644\uc131\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ubc18\ud658 \uac12\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\uc740 json\uc5d0\uc11c \ud45c\ud604\ud560 \uc218 \uc788\ub294 \ud0c0\uc785\ub4e4\ub9cc \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub300\ud45c\uc801\uc73c\ub85c \uc0ac\uc6a9\ub418\uba70 \uad8c\uc7a5\ud558\ub294 \ud0c0\uc785\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"int"),(0,o.kt)("li",{parentName:"ul"},"float"),(0,o.kt)("li",{parentName:"ul"},"str")),(0,o.kt)("p",null,"\ub9cc\uc57d \ub2e8\uc77c \uac12\uc774 \uc544\ub2cc \uc5ec\ub7ec \uac12\uc744 \ubc18\ud658\ud558\ub824\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"collections.namedtuple")," \uc744 \uc774\uc6a9\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ucc38\uace0 \ud558\uc2dc\uae38 \ubc14\ub78d\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c \uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c 2\ub85c \ub098\ub208 \ubaab\uacfc \ub098\uba38\uc9c0\ub97c \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud3ec\ub9f7\uc73c\ub85c \ubcc0\ud658\ud574\uc57c \ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func")," \ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub807\uac8c \ubcc0\ud658\ub41c \ud615\ud0dc\ub294 \ud30c\uc774\uc36c\uc5d0\uc11c \ud568\uc218\ub85c import \ud558\uc5ec\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,o.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,o.kt)("p",null,"\ub9cc\uc57d \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \uacf5\uc720\ub97c \ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0 YAML \ud30c\uc77c\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uacf5\uc720\ud574\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \uc704\ud574\uc11c\ub294 \uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud55c \ub4a4 ",(0,o.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file")," \uc744 \ud1b5\ud574 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6b0\uc120 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c YAML \ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,o.kt)("p",null,"\uc791\uc131\ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \uc2e4\ud589\ud558\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
\ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,o.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ud30c\uc77c\uc744 \uacf5\uc720\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,o.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,o.kt)("p",null,"Kubeflow\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull "),": \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \ud658\uacbd \uc815\ubcf4\uac00 \ub2f4\uae34 \uc774\ubbf8\uc9c0\ub97c pull"),(0,o.kt)("li",{parentName:"ol"},"run ",(0,o.kt)("inlineCode",{parentName:"li"},"command"),": pull \ud55c \uc774\ubbf8\uc9c0\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4. ")),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," \ub97c \uc608\uc2dc\ub85c \ub4e4\uc790\uba74 ",(0,o.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," \uc758 default image \ub294 python:3.7 \uc774\ubbc0\ub85c \ud574\ub2f9 \uc774\ubbf8\uc9c0\ub97c \uae30\uc900\uc73c\ub85c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4. 
"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,o.kt)("h2",{id:"references"},"References:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8999d56c.daf73b12.js b/assets/js/8999d56c.39b342ab.js similarity index 99% rename from assets/js/8999d56c.daf73b12.js rename to assets/js/8999d56c.39b342ab.js index 00dcf518..da6e6ec3 100644 --- a/assets/js/8999d56c.daf73b12.js +++ b/assets/js/8999d56c.39b342ab.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8231],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},k="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),k=p(a),c=r,d=k["".concat(s,".").concat(c)]||k[c]||m[c]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=c;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[k]="string"==typeof e?e:r,l[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},l=void 0,i={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"version-1.0/kubeflow-dashboard-guide/notebooks",title:"2. Notebooks",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. 
Central Dashboard",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30",level:2}],u={toc:p},k="wrapper";function m(e){let{components:t,...o}=e;return(0,r.kt)(k,(0,n.Z)({},u,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30"),(0,r.kt)("p",null,"\ub2e4\uc74c Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Notebooks\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:a(6316).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"Notebooks \ud0ed\uc740 JupyterHub\uc640 \ube44\uc2b7\ud558\uac8c \uc720\uc800\ubcc4\ub85c jupyter notebook \ubc0f code server \ud658\uacbd(\uc774\ud558 \ub178\ud2b8\ubd81 \uc11c\ubc84)\uc744 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc0dd\uc131\ud558\uace0 \uc811\uc18d\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-home",src:a(1579).Z,width:"5008",height:"2682"})),(0,r.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW NOTEBOOK")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-notebook",src:a(1286).Z,width:"1900",height:"312"})),(0,r.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \ud654\uba74\uc774 \ub098\ud0c0\ub098\uba74, \uc774\uc81c \uc0dd\uc131\ud560 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc2a4\ud399(Spec)\uc744 \uba85\uc2dc\ud558\uc5ec \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create",src:a(3516).Z,width:"1738",height:"1674"})),(0,r.kt)("details",null,(0,r.kt)("summary",null,"\uac01 \uc2a4\ud399\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"name"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uad6c\ubd84\ud560 \uc218 \uc788\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"namespace")," :",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub530\ub85c \ubcc0\uacbd\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
(\ud604\uc7ac \ub85c\uadf8\uc778\ud55c user \uacc4\uc815\uc758 namespace\uc774 \uc790\ub3d9\uc73c\ub85c \uc9c0\uc815\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.)"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Image"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"sklearn, pytorch, tensorflow \ub4f1\uc758 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0\uac00 \ubbf8\ub9ac \uc124\uce58\ub41c jupyter lab \uc774\ubbf8\uc9c0 \uc911 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c GPU\ub97c \uc0ac\uc6a9\ud558\uc5ec tensorflow-cuda, pytorch-cuda \ub4f1\uc758 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, ",(0,r.kt)("strong",{parentName:"li"},"\ud558\ub2e8\uc758 GPUs")," \ubd80\ubd84\uc744 \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0\ub098 \uc18c\uc2a4\ucf54\ub4dc \ub4f1\uc744 \ud3ec\ud568\ud55c \ucee4\uc2a4\ud140(Custom) \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ucee4\uc2a4\ud140 \uc774\ubbf8\uc9c0(Custom Image)\ub97c \ub9cc\ub4e4\uace0 \ubc30\ud3ec \ud6c4 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"CPU / RAM"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ud544\uc694\ud55c \uc790\uc6d0 \uc0ac\uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"cpu : core \ub2e8\uc704",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uac00\uc0c1 core \uac1c\uc218 \ub2e8\uc704\ub97c \uc758\ubbf8\ud558\uba70, int \ud615\uc2dd\uc774 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"2.7")," \ub4f1\uc758 float \ud615\uc2dd\ub3c4 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"memory : Gi \ub2e8\uc704"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"GPUs"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc5d0 \ud560\ub2f9\ud560 GPU \uac1c\uc218\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"None"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU \uc790\uc6d0\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc740 \uc0c1\ud669"))),(0,r.kt)("li",{parentName:"ul"},"1, 2, 4",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU 1, 2, 4 \uac1c \ud560\ub2f9"))))),(0,r.kt)("li",{parentName:"ul"},"GPU Vendor",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc55e\uc758 ",(0,r.kt)("a",{parentName:"li",href:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," \ub97c \ub530\ub77c nvidia gpu plugin\uc744 \uc124\uce58\ud558\uc600\ub2e4\uba74 NVIDIA\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Workspace Volume"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c \ud544\uc694\ud55c \ub9cc\ud07c\uc758 \ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"Type \uacfc Name \uc740 
\ubcc0\uacbd\ud558\uc9c0 \uc54a\uace0, ",(0,r.kt)("strong",{parentName:"li"},"\ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \ub298\ub9ac\uace0 \uc2f6\uac70\ub098")," ",(0,r.kt)("strong",{parentName:"li"},"AccessMode \ub97c \ubcc0\uacbd\ud558\uace0 \uc2f6\uc744")," \ub54c\uc5d0\ub9cc \ubcc0\uacbd\ud574\uc11c \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," \uccb4\ud06c\ubc15\uc2a4\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc791\uc5c5 \ub0b4\uc6a9\uc744 \uc800\uc7a5\ud558\uc9c0 \uc54a\uc544\ub3c4 \uc0c1\uad00\uc5c6\uc744 \ub54c\uc5d0\ub9cc \ud074\ub9ad\ud569\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"li"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ub204\ub974\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.")),(0,r.kt)("li",{parentName:"ul"},'\uae30\uc874\uc5d0 \ubbf8\ub9ac \uc0dd\uc131\ud574\ub450\uc5c8\ub358 PVC\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \ub54c\uc5d0\ub294, Type\uc744 "Existing" \uc73c\ub85c \uc785\ub825\ud558\uc5ec \ud574\ub2f9 PVC\uc758 \uc774\ub984\uc744 \uc785\ub825\ud558\uc5ec \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.'))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Data Volumes"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \uc2a4\ud1a0\ub9ac\uc9c0 \uc790\uc6d0\uc774 \ud544\uc694\ud558\ub2e4\uba74 ",(0,r.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ud544\uc694\ud558\uc9c0 \uc54a\uc73c\ubbc0\ub85c ",(0,r.kt)("em",{parentName:"li"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \uc124\uba85\uc744 \uc0dd\ub7b5\ud569\ub2c8\ub2e4."))))),(0,r.kt)("p",null,"\ubaa8\ub450 \uc815\uc0c1\uc801\uc73c\ub85c \uc785\ub825\ud558\uc600\ub2e4\uba74 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},"LAUNCH")," \ubc84\ud2bc\uc774 \ud65c\uc131\ud654\ub418\uba70, \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131\uc774 \uc2dc\uc791\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating",src:a(8112).Z,width:"1928",height:"400"})),(0,r.kt)("p",null,"\uc0dd\uc131 \ud6c4 \uc544\ub798\uc640 \uac19\uc774 ",(0,r.kt)("strong",{parentName:"p"},"Status")," \uac00 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc \uc544\uc774\ucf58\uc73c\ub85c \ubcc0\ud558\uba70, ",(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc774 \ud65c\uc131\ud654\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"created",src:a(1325).Z,width:"1852",height:"352"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc744 \ud074\ub9ad\ud558\uba74 \ube0c\ub77c\uc6b0\uc800\uc5d0 \uc0c8 \ucc3d\uc774 \uc5f4\ub9ac\uba70, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-access",src:a(323).Z,width:"2898",height:"1990"})),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Launcher"),"\uc758 Notebook, 
Console, Terminal \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Notebook \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-console",src:a(1710).Z,width:"2850",height:"736"})),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Terminal \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"terminal-console",src:a(5668).Z,width:"2834",height:"806"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30"),(0,r.kt)("p",null,"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc624\ub79c \uc2dc\uac04 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc758 \ud6a8\uc728\uc801\uc778 \ub9ac\uc18c\uc2a4 \uc0ac\uc6a9\uc744 \uc704\ud574\uc11c \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc911\ub2e8(Stop)\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ub2e8, \uc774 \uacbd\uc6b0 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \uc2dc Workspace Volume \ub610\ub294 Data Volume\uc73c\ub85c \uc9c0\uc815\ud574\ub193\uc740 \uacbd\ub85c \uc678\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \ucd08\uae30\ud654\ub418\ub294 \uac83\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("br",{parentName:"p"}),"\n","\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \ub2f9\uc2dc \uacbd\ub85c\ub97c \ubcc0\uacbd\ud558\uc9c0 \uc54a\uc558\ub2e4\uba74, \ub514\ud3f4\ud2b8(Default) Workspace Volume\uc758 \uacbd\ub85c\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc774\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc758 \ud558\uc704 \uacbd\ub85c \uc774\uc678\uc758 \uacbd\ub85c\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \uc0ac\ub77c\uc9d1\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"STOP")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84\uac00 \uc911\ub2e8\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-stop",src:a(6320).Z,width:"1832",height:"1014"})),(0,r.kt)("p",null,"\uc911\ub2e8\uc774 \uc644\ub8cc\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"CONNECT")," \ubc84\ud2bc\uc774 \ube44\ud65c\uc131\ud654\ub418\uba70, ",(0,r.kt)("inlineCode",{parentName:"p"},"PLAY")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc2dc \uc815\uc0c1\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-restart",src:a(6421).Z,width:"1888",height:"932"})))}m.isMDXComponent=!0},3516:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},1325:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},8112:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},6316:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1286:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},323:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},1710:(e,t,a)=>{a.d(t,{Z:()=>n});const 
n=a.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},1579:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},6421:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},6320:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},5668:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8231],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},k="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),k=p(a),c=r,d=k["".concat(s,".").concat(c)]||k[c]||m[c]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=c;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[k]="string"==typeof e?e:r,l[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},l=void 0,i={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"version-1.0/kubeflow-dashboard-guide/notebooks",title:"2. Notebooks",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/docs/1.0/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro"},next:{title:"3. 
Tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30",level:2}],u={toc:p},k="wrapper";function m(e){let{components:t,...o}=e;return(0,r.kt)(k,(0,n.Z)({},u,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30"),(0,r.kt)("p",null,"\ub2e4\uc74c Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Notebooks\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:a(6316).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"Notebooks \ud0ed\uc740 JupyterHub\uc640 \ube44\uc2b7\ud558\uac8c \uc720\uc800\ubcc4\ub85c jupyter notebook \ubc0f code server \ud658\uacbd(\uc774\ud558 \ub178\ud2b8\ubd81 \uc11c\ubc84)\uc744 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc0dd\uc131\ud558\uace0 \uc811\uc18d\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-home",src:a(1579).Z,width:"5008",height:"2682"})),(0,r.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW NOTEBOOK")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-notebook",src:a(1286).Z,width:"1900",height:"312"})),(0,r.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \ud654\uba74\uc774 \ub098\ud0c0\ub098\uba74, \uc774\uc81c \uc0dd\uc131\ud560 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc2a4\ud399(Spec)\uc744 \uba85\uc2dc\ud558\uc5ec \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create",src:a(3516).Z,width:"1738",height:"1674"})),(0,r.kt)("details",null,(0,r.kt)("summary",null,"\uac01 \uc2a4\ud399\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"name"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uad6c\ubd84\ud560 \uc218 \uc788\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"namespace")," :",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub530\ub85c \ubcc0\uacbd\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
(\ud604\uc7ac \ub85c\uadf8\uc778\ud55c user \uacc4\uc815\uc758 namespace\uc774 \uc790\ub3d9\uc73c\ub85c \uc9c0\uc815\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.)"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Image"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"sklearn, pytorch, tensorflow \ub4f1\uc758 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0\uac00 \ubbf8\ub9ac \uc124\uce58\ub41c jupyter lab \uc774\ubbf8\uc9c0 \uc911 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c GPU\ub97c \uc0ac\uc6a9\ud558\uc5ec tensorflow-cuda, pytorch-cuda \ub4f1\uc758 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, ",(0,r.kt)("strong",{parentName:"li"},"\ud558\ub2e8\uc758 GPUs")," \ubd80\ubd84\uc744 \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0\ub098 \uc18c\uc2a4\ucf54\ub4dc \ub4f1\uc744 \ud3ec\ud568\ud55c \ucee4\uc2a4\ud140(Custom) \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ucee4\uc2a4\ud140 \uc774\ubbf8\uc9c0(Custom Image)\ub97c \ub9cc\ub4e4\uace0 \ubc30\ud3ec \ud6c4 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"CPU / RAM"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ud544\uc694\ud55c \uc790\uc6d0 \uc0ac\uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"cpu : core \ub2e8\uc704",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uac00\uc0c1 core \uac1c\uc218 \ub2e8\uc704\ub97c \uc758\ubbf8\ud558\uba70, int \ud615\uc2dd\uc774 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"2.7")," \ub4f1\uc758 float \ud615\uc2dd\ub3c4 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"memory : Gi \ub2e8\uc704"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"GPUs"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc5d0 \ud560\ub2f9\ud560 GPU \uac1c\uc218\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"None"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU \uc790\uc6d0\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc740 \uc0c1\ud669"))),(0,r.kt)("li",{parentName:"ul"},"1, 2, 4",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU 1, 2, 4 \uac1c \ud560\ub2f9"))))),(0,r.kt)("li",{parentName:"ul"},"GPU Vendor",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc55e\uc758 ",(0,r.kt)("a",{parentName:"li",href:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," \ub97c \ub530\ub77c nvidia gpu plugin\uc744 \uc124\uce58\ud558\uc600\ub2e4\uba74 NVIDIA\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Workspace Volume"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c \ud544\uc694\ud55c \ub9cc\ud07c\uc758 \ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"Type \uacfc Name \uc740 
\ubcc0\uacbd\ud558\uc9c0 \uc54a\uace0, ",(0,r.kt)("strong",{parentName:"li"},"\ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \ub298\ub9ac\uace0 \uc2f6\uac70\ub098")," ",(0,r.kt)("strong",{parentName:"li"},"AccessMode \ub97c \ubcc0\uacbd\ud558\uace0 \uc2f6\uc744")," \ub54c\uc5d0\ub9cc \ubcc0\uacbd\ud574\uc11c \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," \uccb4\ud06c\ubc15\uc2a4\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc791\uc5c5 \ub0b4\uc6a9\uc744 \uc800\uc7a5\ud558\uc9c0 \uc54a\uc544\ub3c4 \uc0c1\uad00\uc5c6\uc744 \ub54c\uc5d0\ub9cc \ud074\ub9ad\ud569\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"li"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ub204\ub974\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.")),(0,r.kt)("li",{parentName:"ul"},'\uae30\uc874\uc5d0 \ubbf8\ub9ac \uc0dd\uc131\ud574\ub450\uc5c8\ub358 PVC\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \ub54c\uc5d0\ub294, Type\uc744 "Existing" \uc73c\ub85c \uc785\ub825\ud558\uc5ec \ud574\ub2f9 PVC\uc758 \uc774\ub984\uc744 \uc785\ub825\ud558\uc5ec \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.'))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Data Volumes"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \uc2a4\ud1a0\ub9ac\uc9c0 \uc790\uc6d0\uc774 \ud544\uc694\ud558\ub2e4\uba74 ",(0,r.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ud544\uc694\ud558\uc9c0 \uc54a\uc73c\ubbc0\ub85c ",(0,r.kt)("em",{parentName:"li"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \uc124\uba85\uc744 \uc0dd\ub7b5\ud569\ub2c8\ub2e4."))))),(0,r.kt)("p",null,"\ubaa8\ub450 \uc815\uc0c1\uc801\uc73c\ub85c \uc785\ub825\ud558\uc600\ub2e4\uba74 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},"LAUNCH")," \ubc84\ud2bc\uc774 \ud65c\uc131\ud654\ub418\uba70, \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131\uc774 \uc2dc\uc791\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating",src:a(8112).Z,width:"1928",height:"400"})),(0,r.kt)("p",null,"\uc0dd\uc131 \ud6c4 \uc544\ub798\uc640 \uac19\uc774 ",(0,r.kt)("strong",{parentName:"p"},"Status")," \uac00 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc \uc544\uc774\ucf58\uc73c\ub85c \ubcc0\ud558\uba70, ",(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc774 \ud65c\uc131\ud654\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"created",src:a(1325).Z,width:"1852",height:"352"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc744 \ud074\ub9ad\ud558\uba74 \ube0c\ub77c\uc6b0\uc800\uc5d0 \uc0c8 \ucc3d\uc774 \uc5f4\ub9ac\uba70, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-access",src:a(323).Z,width:"2898",height:"1990"})),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Launcher"),"\uc758 Notebook, 
Console, Terminal \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Notebook \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-console",src:a(1710).Z,width:"2850",height:"736"})),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Terminal \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"terminal-console",src:a(5668).Z,width:"2834",height:"806"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30"),(0,r.kt)("p",null,"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc624\ub79c \uc2dc\uac04 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc758 \ud6a8\uc728\uc801\uc778 \ub9ac\uc18c\uc2a4 \uc0ac\uc6a9\uc744 \uc704\ud574\uc11c \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc911\ub2e8(Stop)\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ub2e8, \uc774 \uacbd\uc6b0 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \uc2dc Workspace Volume \ub610\ub294 Data Volume\uc73c\ub85c \uc9c0\uc815\ud574\ub193\uc740 \uacbd\ub85c \uc678\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \ucd08\uae30\ud654\ub418\ub294 \uac83\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("br",{parentName:"p"}),"\n","\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \ub2f9\uc2dc \uacbd\ub85c\ub97c \ubcc0\uacbd\ud558\uc9c0 \uc54a\uc558\ub2e4\uba74, \ub514\ud3f4\ud2b8(Default) Workspace Volume\uc758 \uacbd\ub85c\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc774\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc758 \ud558\uc704 \uacbd\ub85c \uc774\uc678\uc758 \uacbd\ub85c\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \uc0ac\ub77c\uc9d1\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"STOP")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84\uac00 \uc911\ub2e8\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-stop",src:a(6320).Z,width:"1832",height:"1014"})),(0,r.kt)("p",null,"\uc911\ub2e8\uc774 \uc644\ub8cc\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"CONNECT")," \ubc84\ud2bc\uc774 \ube44\ud65c\uc131\ud654\ub418\uba70, ",(0,r.kt)("inlineCode",{parentName:"p"},"PLAY")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc2dc \uc815\uc0c1\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-restart",src:a(6421).Z,width:"1888",height:"932"})))}m.isMDXComponent=!0},3516:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},1325:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},8112:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},6316:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1286:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},323:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},1710:(e,t,a)=>{a.d(t,{Z:()=>n});const 
n=a.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},1579:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},6421:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},6320:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},5668:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file diff --git a/assets/js/8b8d160d.f2d83f5a.js b/assets/js/8b8d160d.502b91fd.js similarity index 99% rename from assets/js/8b8d160d.f2d83f5a.js rename to assets/js/8b8d160d.502b91fd.js index 3b8387f1..6964f878 100644 --- a/assets/js/8b8d160d.f2d83f5a.js +++ b/assets/js/8b8d160d.502b91fd.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5787],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=r,d=c["".concat(i,".").concat(k)]||c[k]||m[k]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=k;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:r,l[1]=s;for(var p=2;p{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"version-1.0/setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. 
Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},p=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],u={toc:p},c="wrapper";function m(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\uc774 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc124\uce58\ud558\uae30\uc5d0 \uc55e\uc11c, ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \uc124\uce58 \ud639\uc740 \uc124\uc815\ud574\ub450\uc5b4\uc57c \ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc5d0 \ub300\ud55c \ub9e4\ub274\uc5bc\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"\ucd94\ud6c4 \ud074\ub77c\uc774\uc5b8\ud2b8\uc640 \ud074\ub7ec\uc2a4\ud130\uc758 \uc6d0\ud65c\ud55c \ud1b5\uc2e0\uc744 \uc704\ud574\uc11c\ub294 Port-Forwarding\uc744 \uc218\ud589\ud574\uc57c \ud560 \uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4.\nPort-Forwarding\uc744 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0 \ub2e4\uc74c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4 \uc124\uce58\uc5d0 \ud544\uc694\ud55c APT \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uc758 \uacf5\uc2dd GPG key\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"apt \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc800\ub85c \ub3c4\ucee4\ub97c \uc124\uce58\ud560 \ub54c, stable Repository\uc5d0\uc11c \ubc1b\uc544\uc624\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \uc124\uce58\ud560 \uc218 \uc788\ub294 \ub3c4\ucee4 \ubc84\uc804\uc744 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"\ucd9c\ub825\ub418\ub294 \ubc84\uc804 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc774 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\uac00 \ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc758 \ub3c4\ucee4\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. 
The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"docker \uad00\ub828 command\ub97c sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud558\ub3c4\ub85d \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uad8c\ud55c\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 docker command\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ub41c \uac83\uc744 \ud655\uc778\ud558\uae30 \uc704\ud574, \ub2e4\uc2dc \ud55c\ubc88 docker run\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uad8c\ud55c\uc774 \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"kubelet \uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uac8c \ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uc5d0\uc11c swap\uc774\ub77c\uace0 \ubd88\ub9ac\ub294 \uac00\uc0c1\uba54\ubaa8\ub9ac\ub97c \uaebc \ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. 
\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 swap\uc744 \uaebc \ub461\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(\ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8\ub97c \uac19\uc740 \ub370\uc2a4\ud06c\ud1b1\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ub54c swap \uba54\ubaa8\ub9ac\ub97c \uc885\ub8cc\ud558\uba74 \uc18d\ub3c4\uc758 \uc800\ud558\uac00 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4)")," "),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl \uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 API\ub97c \uc694\uccad\ud560 \ub54c \uc0ac\uc6a9\ud558\ub294 \ud074\ub77c\uc774\uc5b8\ud2b8 \ud234\uc785\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0 \uc124\uce58\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kubectl v1.21.7 \ubc84\uc804\uc744 \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kubectl \uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \ud30c\uc77c\uc758 \uad8c\ud55c\uacfc \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc5ec\ub7ec \uac1c\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, \uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c\uc744 \uad00\ub9ac\ud574\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c \ud639\uc740 \uc5ec\ub7ec \uac1c\uc758 kube-context\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ubc29\ubc95\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \ubb38\uc11c\ub97c \ucc38\uace0\ud558\uc2dc\uae30 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"https://github.com/ahmetb/kubectx"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/docs/tasks/tools/install-kubectl-linux/"},"\ub9ac\ub205\uc2a4\uc5d0 kubectl \uc124\uce58 \ubc0f \uc124\uc815"))))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5787],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=r,d=c["".concat(i,".").concat(k)]||c[k]||m[k]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=k;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:r,l[1]=s;for(var p=2;p{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"version-1.0/setup-kubernetes/install-prerequisite",title:"3. 
Install Prerequisite",description:"Install docker",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},p=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],u={toc:p},c="wrapper";function m(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\uc774 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc124\uce58\ud558\uae30\uc5d0 \uc55e\uc11c, ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \uc124\uce58 \ud639\uc740 \uc124\uc815\ud574\ub450\uc5b4\uc57c \ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc5d0 \ub300\ud55c \ub9e4\ub274\uc5bc\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"\ucd94\ud6c4 \ud074\ub77c\uc774\uc5b8\ud2b8\uc640 \ud074\ub7ec\uc2a4\ud130\uc758 \uc6d0\ud65c\ud55c \ud1b5\uc2e0\uc744 \uc704\ud574\uc11c\ub294 Port-Forwarding\uc744 \uc218\ud589\ud574\uc57c \ud560 \uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4.\nPort-Forwarding\uc744 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0 \ub2e4\uc74c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4 \uc124\uce58\uc5d0 \ud544\uc694\ud55c APT \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uc758 \uacf5\uc2dd GPG key\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"apt \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc800\ub85c \ub3c4\ucee4\ub97c 
\uc124\uce58\ud560 \ub54c, stable Repository\uc5d0\uc11c \ubc1b\uc544\uc624\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \uc124\uce58\ud560 \uc218 \uc788\ub294 \ub3c4\ucee4 \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"\ucd9c\ub825\ub418\ub294 \ubc84\uc804 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc774 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\uac00 \ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc758 \ub3c4\ucee4\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. 
The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"docker \uad00\ub828 command\ub97c sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud558\ub3c4\ub85d \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uad8c\ud55c\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 docker command\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ub41c \uac83\uc744 \ud655\uc778\ud558\uae30 \uc704\ud574, \ub2e4\uc2dc \ud55c\ubc88 docker run\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uad8c\ud55c\uc774 \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"kubelet \uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uac8c \ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uc5d0\uc11c swap\uc774\ub77c\uace0 \ubd88\ub9ac\ub294 \uac00\uc0c1\uba54\ubaa8\ub9ac\ub97c \uaebc \ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. 
\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 swap\uc744 \uaebc \ub461\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(\ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8\ub97c \uac19\uc740 \ub370\uc2a4\ud06c\ud1b1\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ub54c swap \uba54\ubaa8\ub9ac\ub97c \uc885\ub8cc\ud558\uba74 \uc18d\ub3c4\uc758 \uc800\ud558\uac00 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4)")," "),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl \uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 API\ub97c \uc694\uccad\ud560 \ub54c \uc0ac\uc6a9\ud558\ub294 \ud074\ub77c\uc774\uc5b8\ud2b8 \ud234\uc785\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0 \uc124\uce58\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kubectl v1.21.7 \ubc84\uc804\uc744 \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kubectl \uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \ud30c\uc77c\uc758 \uad8c\ud55c\uacfc \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc5ec\ub7ec \uac1c\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, \uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c\uc744 \uad00\ub9ac\ud574\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c \ud639\uc740 \uc5ec\ub7ec \uac1c\uc758 kube-context\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ubc29\ubc95\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \ubb38\uc11c\ub97c \ucc38\uace0\ud558\uc2dc\uae30 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"https://github.com/ahmetb/kubectx"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/docs/tasks/tools/install-kubectl-linux/"},"\ub9ac\ub205\uc2a4\uc5d0 kubectl \uc124\uce58 \ubc0f \uc124\uc815"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8c6322ce.cc644628.js b/assets/js/8c6322ce.a17642cf.js similarity index 99% rename from assets/js/8c6322ce.cc644628.js rename to assets/js/8c6322ce.a17642cf.js index 666912fc..372daa07 100644 --- a/assets/js/8c6322ce.cc644628.js +++ b/assets/js/8c6322ce.a17642cf.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5604],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>g});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function p(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function o(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),d=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):o(o({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(l.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,p=t.originalType,l=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),u=d(n),m=r,g=u["".concat(l,".").concat(m)]||u[m]||c[m]||p;return n?a.createElement(g,o(o({ref:e},s),{},{components:n})):a.createElement(g,o({ref:e},s))}));function g(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var p=n.length,o=new Array(p);o[0]=m;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[u]="string"==typeof t?t:r,o[1]=i;for(var d=2;d{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>p,metadata:()=>i,toc:()=>d});var a=n(7462),r=(n(7294),n(3905));const p={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"kubeflow/how-to-debug",id:"kubeflow/how-to-debug",title:"13. 
Component - Debugging",description:"",source:"@site/docs/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/docs/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/how-to-debug.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/docs/kubeflow/advanced-mlflow"},next:{title:"1. What is API Deployment?",permalink:"/docs/api-deployment/what-is-api-deployment"}},l={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...p}=t;return(0,r.kt)(u,(0,a.Z)({},s,p,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 Kubeflow \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub514\ubc84\uae45\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"failed-component"},"Failed Component"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," \uc5d0\uc11c \uc774\uc6a9\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc870\uae08 \uc218\uc815\ud574\uc11c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud328\ud558\ub3c4\ub85d \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubcc0\uacbd\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = 
pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,r.kt)("p",null,"\uc218\uc815\ud55c \uc810\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc624\ub294 ",(0,r.kt)("inlineCode",{parentName:"li"},"load_iris_data")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," \ud53c\ucc98\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," \uac12\uc744 \uc8fc\uc785"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"drop_na()")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 na \uac12\uc774 \ud3ec\ud568\ub41c ",(0,r.kt)("inlineCode",{parentName:"li"},"row"),"\ub97c \uc81c\uac70")),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc2e4\ud589 \ud6c4 Run\uc744 \ub20c\ub7ec\uc11c \ud655\uc778\ud574\ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"Train from csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc2e4\ud328\ud588\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-0.png",src:n(9789).Z,width:"2826",height:"1790"})),(0,r.kt)("p",null,"\uc2e4\ud328\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uc11c \uc2e4\ud328\ud55c \uc774\uc720\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-2.png",src:n(5904).Z,width:"2826",height:"1796"})),(0,r.kt)("p",null,"\ub85c\uadf8\ub97c \ud655\uc778\ud558\uba74 \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uc774\uc5ec\uc11c \uc2e4\ud589\ub418\uc9c0 \uc54a\uc558\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ubd84\uba85 \uc815\uc0c1\uc801\uc73c\ub85c \ub370\uc774\ud130\ub97c \uc804\ub2ec\ud588\ub294\ub370 \uc65c \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uac1c\uc77c\uae4c\uc694? 
"),(0,r.kt)("p",null,"\uc774\uc81c \uc785\ub825\ubc1b\uc740 \ub370\uc774\ud130\uc5d0 \uc5b4\ub5a4 \ubb38\uc81c\uac00 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 Input/Ouput \ud0ed\uc5d0\uc11c \uc785\ub825\uac12\uc73c\ub85c \ub4e4\uc5b4\uac04 \ub370\uc774\ud130\ub4e4\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc6b4\ub85c\ub4dc\ub294 \ube68\uac04\uc0c9 \ub124\ubaa8\ub85c \ud45c\uc2dc\ub41c \uacf3\uc758 \ub9c1\ud06c\ub97c \ud074\ub9ad\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-5.png",src:n(5801).Z,width:"2690",height:"1740"})),(0,r.kt)("p",null,"\ub450 \uac1c\uc758 \ud30c\uc77c\uc744 \uac19\uc740 \uacbd\ub85c\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub9ac\uace0 \ud574\ub2f9 \uacbd\ub85c\ub85c \uc774\ub3d9\ud574\uc11c \ud30c\uc77c\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ud30c\uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,r.kt)("p",null,"\uc555\ucd95\uc744 \ud480\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 \uc774\ub97c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc744 \uc774\uc6a9\ud574 \ucef4\ud3ec\ub10c\ud2b8 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-3.png",src:n(8693).Z,width:"2434",height:"1690"})),(0,r.kt)("p",null,"\ub514\ubc84\uae45\uc744 \ud574\ubcf8 \uacb0\uacfc dropna \ud560 \ub54c column\uc744 \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc57c \ud558\ub294\ub370 row\ub97c \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc11c \ub370\uc774\ud130\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.\n\uc774\uc81c \ubb38\uc81c\uc758 \uc6d0\uc778\uc744 \uc54c\uc544\ub0c8\uc73c\ub2c8 column\uc744 \uae30\uc900\uc73c\ub85c drop\uc774 \ub418\uac8c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,r.kt)("p",null,"\uc218\uc815 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub2e4\uc2dc \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-6.png",src:n(6157).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},9789:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},5904:(t,e,n)=>{n.d(e,{Z:()=>a});const 
a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},8693:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},5801:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},6157:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5604],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>g});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function p(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function o(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),d=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):o(o({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(l.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,p=t.originalType,l=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),u=d(n),m=r,g=u["".concat(l,".").concat(m)]||u[m]||c[m]||p;return n?a.createElement(g,o(o({ref:e},s),{},{components:n})):a.createElement(g,o({ref:e},s))}));function g(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var p=n.length,o=new Array(p);o[0]=m;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[u]="string"==typeof t?t:r,o[1]=i;for(var d=2;d{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>p,metadata:()=>i,toc:()=>d});var a=n(7462),r=(n(7294),n(3905));const p={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"kubeflow/how-to-debug",id:"kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/docs/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/docs/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/how-to-debug.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/docs/kubeflow/advanced-mlflow"},next:{title:"1. 
What is API Deployment?",permalink:"/docs/api-deployment/what-is-api-deployment"}},l={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...p}=t;return(0,r.kt)(u,(0,a.Z)({},s,p,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 Kubeflow \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub514\ubc84\uae45\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"failed-component"},"Failed Component"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," \uc5d0\uc11c \uc774\uc6a9\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc870\uae08 \uc218\uc815\ud574\uc11c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud328\ud558\ub3c4\ub85d \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubcc0\uacbd\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, 
file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,r.kt)("p",null,"\uc218\uc815\ud55c \uc810\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc624\ub294 ",(0,r.kt)("inlineCode",{parentName:"li"},"load_iris_data")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," \ud53c\ucc98\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," \uac12\uc744 \uc8fc\uc785"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"drop_na()")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 na \uac12\uc774 \ud3ec\ud568\ub41c ",(0,r.kt)("inlineCode",{parentName:"li"},"row"),"\ub97c \uc81c\uac70")),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc2e4\ud589 \ud6c4 Run\uc744 \ub20c\ub7ec\uc11c \ud655\uc778\ud574\ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"Train from csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc2e4\ud328\ud588\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-0.png",src:n(9789).Z,width:"2826",height:"1790"})),(0,r.kt)("p",null,"\uc2e4\ud328\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uc11c \uc2e4\ud328\ud55c \uc774\uc720\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-2.png",src:n(5904).Z,width:"2826",height:"1796"})),(0,r.kt)("p",null,"\ub85c\uadf8\ub97c \ud655\uc778\ud558\uba74 \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uc774\uc5ec\uc11c \uc2e4\ud589\ub418\uc9c0 \uc54a\uc558\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ubd84\uba85 \uc815\uc0c1\uc801\uc73c\ub85c \ub370\uc774\ud130\ub97c \uc804\ub2ec\ud588\ub294\ub370 \uc65c \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uac1c\uc77c\uae4c\uc694? 
"),(0,r.kt)("p",null,"\uc774\uc81c \uc785\ub825\ubc1b\uc740 \ub370\uc774\ud130\uc5d0 \uc5b4\ub5a4 \ubb38\uc81c\uac00 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 Input/Ouput \ud0ed\uc5d0\uc11c \uc785\ub825\uac12\uc73c\ub85c \ub4e4\uc5b4\uac04 \ub370\uc774\ud130\ub4e4\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc6b4\ub85c\ub4dc\ub294 \ube68\uac04\uc0c9 \ub124\ubaa8\ub85c \ud45c\uc2dc\ub41c \uacf3\uc758 \ub9c1\ud06c\ub97c \ud074\ub9ad\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-5.png",src:n(5801).Z,width:"2690",height:"1740"})),(0,r.kt)("p",null,"\ub450 \uac1c\uc758 \ud30c\uc77c\uc744 \uac19\uc740 \uacbd\ub85c\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub9ac\uace0 \ud574\ub2f9 \uacbd\ub85c\ub85c \uc774\ub3d9\ud574\uc11c \ud30c\uc77c\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ud30c\uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,r.kt)("p",null,"\uc555\ucd95\uc744 \ud480\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 \uc774\ub97c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc744 \uc774\uc6a9\ud574 \ucef4\ud3ec\ub10c\ud2b8 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-3.png",src:n(8693).Z,width:"2434",height:"1690"})),(0,r.kt)("p",null,"\ub514\ubc84\uae45\uc744 \ud574\ubcf8 \uacb0\uacfc dropna \ud560 \ub54c column\uc744 \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc57c \ud558\ub294\ub370 row\ub97c \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc11c \ub370\uc774\ud130\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.\n\uc774\uc81c \ubb38\uc81c\uc758 \uc6d0\uc778\uc744 \uc54c\uc544\ub0c8\uc73c\ub2c8 column\uc744 \uae30\uc900\uc73c\ub85c drop\uc774 \ub418\uac8c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,r.kt)("p",null,"\uc218\uc815 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub2e4\uc2dc \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-6.png",src:n(6157).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},9789:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},5904:(t,e,n)=>{n.d(e,{Z:()=>a});const 
a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},8693:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},5801:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},6157:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file diff --git a/assets/js/8dd8b1e3.be1bde8a.js b/assets/js/8dd8b1e3.02ae5002.js similarity index 99% rename from assets/js/8dd8b1e3.be1bde8a.js rename to assets/js/8dd8b1e3.02ae5002.js index 31966a0c..4ea11be8 100644 --- a/assets/js/8dd8b1e3.be1bde8a.js +++ b/assets/js/8dd8b1e3.02ae5002.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8425],{3905:(e,t,n)=>{n.d(t,{Zo:()=>k,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),c=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},k=function(e){var t=c(e.components);return r.createElement(p.Provider,{value:t},e.children)},d="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),d=c(n),u=a,m=d["".concat(p,".").concat(u)]||d[u]||s[u]||l;return n?r.createElement(m,o(o({ref:t},k),{},{components:n})):r.createElement(m,o({ref:t},k))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>s,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var r=n(7462),a=(n(7294),n(3905));const l={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/advanced",id:"prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",source:"@site/docs/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/docs/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/advanced.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/docs/prerequisites/docker/images"}},p={},c=[{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 
\ub9cc\ub4e4\uae30",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30",level:2},{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Docker tag \uc774\ub984 \uc9d3\uae30",id:"docker-tag-\uc774\ub984-\uc9d3\uae30",level:3},{value:"ETC",id:"etc",level:3},{value:"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158",id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158",level:2},{value:"docker run with volume",id:"docker-run-with-volume",level:3},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"docker run as a background process",id:"docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],k={toc:c},d="wrapper";function s(e){let{components:t,...l}=e;return(0,a.kt)(d,(0,r.Z)({},k,l,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30"),(0,a.kt)("h3",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810"),(0,a.kt)("p",null,"Dockerfile \uc744 \ud65c\uc6a9\ud558\uc5ec \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c\ub294 \uba85\ub839\uc5b4\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\uc21c\uc11c"),"\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \uc5ec\ub7ec \uac1c\uc758 Read-Only Layer \ub85c \uad6c\uc131\ub418\uc5b4\uc788\uace0, \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud560 \ub54c \uc774\ubbf8 \uc874\uc7ac\ud558\ub294 \ub808\uc774\uc5b4\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uce90\uc2dc\ub418\uc5b4")," \uc7ac\uc0ac\uc6a9\ub418\uae30 \ub54c\ubb38\uc5d0, \uc774\ub97c \uc0dd\uac01\ud574\uc11c Dockerfile \uc744 \uad6c\uc131\ud55c\ub2e4\uba74 ",(0,a.kt)("strong",{parentName:"p"},"\ube4c\ub4dc \uc2dc\uac04\uc744 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"Dockerfile\uc5d0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uba85\ub839\uc5b4 \ud558\ub098\uac00 \ud558\ub098\uc758 \ub808\uc774\uc5b4\ub85c \uc800\uc7a5\ub429\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\ub85c 
\ube4c\ub4dc\ub41c \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash")," \uba85\ub839\uc5b4\ub85c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ub808\uc774\uc5b4\ub85c \ud45c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"layers.png",src:n(6045).Z,width:"1080",height:"612"})),(0,a.kt)("p",null,"\ucd5c\uc0c1\ub2e8\uc758 R/W Layer \ub294 \uc774\ubbf8\uc9c0\uc5d0 \uc601\ud5a5\uc744 \uc8fc\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ub0b4\uc5ed\uc740 \ubaa8\ub450 \ud718\ubc1c\uc131\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub2e8\uc758 \ub808\uc774\uc5b4\uac00 \ubcc0\uacbd\ub418\uba74, \uadf8 \uc704\uc758 \ub808\uc774\uc5b4\ub294 \ubaa8\ub450 \uc0c8\ub85c \ube4c\ub4dc\ub429\ub2c8\ub2e4. \uadf8\ub798\uc11c Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4\uc758 \uc21c\uc11c\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uba74, ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc \ubcc0\uacbd"),"\ub418\ub294 \ubd80\ubd84\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ucd5c\ub300\ud55c \ub4a4\ucabd\uc73c\ub85c")," \uc815\ub82c\ud558\ub294 \uac83\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. (ex. ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,a.kt)("p",null,"\uadf8\ub807\uae30 \ub54c\ubb38\uc5d0 \ubc18\ub300\ub85c \ubcc0\uacbd\ub418\uc9c0 \uc54a\ub294 \ubd80\ubd84\uc740 \ucd5c\ub300\ud55c \uc55e\ucabd\uc73c\ub85c \uc815\ub82c\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub9cc\uc57d \uac70\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\ubcc0\uacbd\ub418\uc9c0 \uc54a\uc9c0\ub9cc"),", \uc5ec\ub7ec \uacf3\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc")," \uc4f0\uc774\ub294 \ubd80\ubd84\uc744 \uacf5\ud1b5\ud654\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud574\ub2f9 \uacf5\ud1b5\ubd80\ubd84\ub9cc \ubb36\uc5b4\uc11c \ubcc4\ub3c4\uc758 \uc774\ubbf8\uc9c0\ub294 \ubbf8\ub9ac \ub9cc\ub4e4\uc5b4\ub454 \ub2e4\uc74c, ",(0,a.kt)("strong",{parentName:"p"},"\ubca0\uc774\uc2a4 \uc774\ubbf8\uc9c0")," \ub85c \ud65c\uc6a9\ud558\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\ub978 \uac74 \uac70\uc758 \ub611\uac19\uc740\ub370, tensorflow-cpu \ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\ubbf8\uc9c0\uc640, tensorflow-gpu \ub97c \uc0ac\uc6a9\ud558\ub294 \ud658\uacbd\uc744 \ubd84\ub9ac\ud574\uc11c \uc774\ubbf8\uc9c0\ub85c \ub9cc\ub4e4\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","python \uacfc \uae30\ud0c0 \uae30\ubcf8\uc801\uc778 \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub41c ",(0,a.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,a.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," \ub97c \ub9cc\ub4e4\uc5b4\ub450\uace0, ",(0,a.kt)("strong",{parentName:"p"},"tensorflow cpu \ubc84\uc804\uacfc gpu \ubc84\uc804\uc774")," \uc124\uce58\ub41c \uc774\ubbf8\uc9c0 \uc0c8\ub85c \ub9cc\ub4e4\ub54c\ub294, \uc704\uc758 \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"FROM")," \uc73c\ub85c \ubd88\ub7ec\uc628 \ub2e4\uc74c, tensorflow install \ud558\ub294 \ubd80\ubd84\ub9cc \ubcc4\ub3c4\ub85c \uc791\uc131\ud574\uc11c Dockerfile \uc744 2 \uac1c\ub85c \uad00\ub9ac\ud55c\ub2e4\uba74 \uac00\ub3c5\uc131\ub3c4 \uc88b\uace0 \ube4c\ub4dc \uc2dc\uac04\ub3c4 
\uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"\ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \ud569\uce58\ub294 \uac83"),"\uc774 Old version \uc758 \ub3c4\ucee4\uc5d0\uc11c\ub294 \uc131\ub2a5 \ud5a5\uc0c1 \ud6a8\uacfc\ub97c \uc774\ub04c\uc5c8\uc2b5\ub2c8\ub2e4. \uc5ec\ub7ec\ubd84\uc758 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 \uc5b4\ub5a4 \ub3c4\ucee4 \ubc84\uc804\uc5d0\uc11c \uc2e4\ud589\ub420 \uac83\uc778\uc9c0 \ubcf4\uc7a5\ud560 \uc218 \uc5c6\uc73c\uba70, ",(0,a.kt)("strong",{parentName:"p"},"\uac00\ub3c5\uc131"),"\uc744 \uc704\ud574\uc11c\ub3c4 \ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \uc801\uc808\ud788 \ud569\uce58\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ub41c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,a.kt)("p",null,"\uc774\ub97c \uc544\ub798\uc640 \uac19\uc774 \ud569\uccd0\uc11c \uc801\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,a.kt)("p",null,"\ud3b8\uc758\ub97c \uc704\ud574\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore")," \ub3c4 \uc0ac\uc6a9\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".gitignore")," \uc640 \ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud55c\ub2e4\uace0 \uc774\ud574\ud558\uba74 \ub429\ub2c8\ub2e4. 
(git add \ud560 \ub54c \uc81c\uc678\ud560 \uc218 \uc788\ub4ef\uc774, docker build \ud560 \ub54c \uc790\ub3d9\uc73c\ub85c \uc81c\uc678)"),(0,a.kt)("p",null,"\ub354 \ub9ce\uc740 \uc815\ubcf4\ub294 ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker \uacf5\uc2dd \ubb38\uc11c"),"\uc5d0\uc11c \ud655\uc778\ud558\uc2e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \ub294 \ubaa8\ub450 \ucee8\ud14c\uc774\ub108\uc758 \uc2e4\ud589 \uc2dc\uc810\uc5d0\uc11c \uc5b4\ub5a4 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\uc2dc\ud0a4\uace0 \uc2f6\uc744 \ub54c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \uc774 \ub458 \uc911 \ud558\ub098\ub294 \ubc18\ub4dc\uc2dc \uc874\uc7ac\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"\ucc28\uc774\uc810"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD"),": docker run \uc744 \uc218\ud589\ud560 \ub54c, \uc27d\uac8c \ubcc0\uacbd\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc74c"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": ",(0,a.kt)("inlineCode",{parentName:"li"},"--entrypoint")," \ub97c \uc0ac\uc6a9\ud574\uc57c \ubcc0\uacbd\ud560 \uc218 \uc788\uc74c")))),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \uac00 \ud568\uaed8 \uc4f0\uc77c \ub54c\ub294 \ubcf4\ud1b5 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc5d0\uc11c \uc801\uc740 \uba85\ub839\uc758 arguments(parameters) \ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc5d0\uc11c \uc8fc\uc11d\uc73c\ub85c \ud45c\uc2dc\ub41c \ubd80\ubd84\ub4e4\uc744 \ud574\uc81c\ud558\uba70 \ube4c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null}),(0,a.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", 
"b"]'))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"NO CMD")),(0,a.kt)("td",{parentName:"tr",align:null},"Error!"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,a.kt)("td",{parentName:"tr",align:null},"x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD x y")),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"In Kubernetes pod",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," \u2192 command"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD")," \u2192 args")))),(0,a.kt)("h3",{id:"docker-tag-\uc774\ub984-\uc9d3\uae30"},"Docker tag \uc774\ub984 \uc9d3\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc758 tag \ub85c ",(0,a.kt)("strong",{parentName:"p"},"latest \ub294 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5"),"\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc720\ub294 latest \ub294 default tag name \uc774\ubbc0\ub85c ",(0,a.kt)("strong",{parentName:"p"},"\uc758\ub3c4\uce58 \uc54a\uac8c overwritten")," \ub418\ub294 \uacbd\uc6b0\uac00 \ub108\ubb34 \ub9ce\uc774 \ubc1c\uc0dd\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub098\uc758 \uc774\ubbf8\uc9c0\ub294 \ud558\ub098\uc758 \ud0dc\uadf8\ub97c \uac00\uc9d0(",(0,a.kt)("strong",{parentName:"p"},"uniqueness"),")\uc744 \ubcf4\uc7a5\ud574\uc57c \ucd94\ud6c4 Production \ub2e8\uacc4\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\ud611\uc5c5/\ub514\ubc84\uae45"),"\uc5d0 \uc6a9\uc774\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub0b4\uc6a9\uc740 \ub2e4\ub974\uc9c0\ub9cc, \ub3d9\uc77c\ud55c tag \ub97c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 \ucd94\ud6c4 dangling image \ub85c \ucde8\uae09\ub418\uc5b4 \uad00\ub9ac\ud558\uae30 \uc5b4\ub824\uc6cc\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","dangling image\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker images"),"\uc5d0\ub294 \ub098\uc624\uc9c0 \uc54a\uc9c0\ub9cc \uacc4\uc18d\ud574\uc11c \uc800\uc7a5\uc18c\ub97c \ucc28\uc9c0\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"etc"},"ETC"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"log \ub4f1\uc758 \uc815\ubcf4\ub294 container \ub0b4\ubd80\uac00 \uc544\ub2cc \uacf3\uc5d0 \ub530\ub85c \uc800\uc7a5\ud569\ub2c8\ub2e4.\ncontainer \ub0b4\ubd80\uc5d0\uc11c write \ud55c data \ub294 \uc5b8\uc81c\ub4e0\uc9c0 \uc0ac\ub77c\uc9c8 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"secret \ud55c \uc815\ubcf4, \ud658\uacbd(dev/prod) dependent \ud55c \uc815\ubcf4 \ub4f1\uc740 Dockerfile \uc5d0 \uc9c1\uc811 \uc801\ub294 \uac8c \uc544\ub2c8\ub77c, env var \ub610\ub294 .env config file \uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"Dockerfile ",(0,a.kt)("strong",{parentName:"li"},"linter")," \ub3c4 
\uc874\uc7ac\ud558\ubbc0\ub85c, \ud611\uc5c5 \uc2dc\uc5d0\ub294 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,a.kt)("h2",{id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158"},"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158"),(0,a.kt)("h3",{id:"docker-run-with-volume"},"docker run with volume"),(0,a.kt)("p",null,"Docker container \uc0ac\uc6a9 \uc2dc \ubd88\ud3b8\ud55c \uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.\n\ubc14\ub85c Docker\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ubaa8\ub4e0 \uc0ac\ud56d\uc740 \uc800\uc7a5\ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc774\uc720\ub294 Docker container \ub294 \uac01\uac01 \uaca9\ub9ac\ub41c \ud30c\uc77c\uc2dc\uc2a4\ud15c\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ub530\ub77c\uc11c, ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec docker container \ub07c\ub9ac \ub370\uc774\ud130\ub97c \uacf5\uc720\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c Docker\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"2 \uac00\uc9c0"),"\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"storage.png",src:n(5940).Z,width:"501",height:"255"})),(0,a.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"docker cli \ub97c \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"li"},"volume")," \uc774\ub77c\ub294 \ub9ac\uc18c\uc2a4\ub97c \uc9c1\uc811 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"host \uc5d0\uc11c Docker area(",(0,a.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") \uc544\ub798\uc5d0 \ud2b9\uc815 \ub514\ub809\ud1a0\ub9ac\ub97c \uc0dd\uc131\ud55c \ub2e4\uc74c, \ud574\ub2f9 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"host \uc758 \ud2b9\uc815 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"how-to-use"},"How to use?"),(0,a.kt)("p",null,"\uc0ac\uc6a9 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ub3d9\uc77c\ud55c \uc778\ud130\ud398\uc774\uc2a4"),"\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"-v")," \uc635\uc158\uc744 \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc, volume \uc744 \uc0ac\uc6a9\ud560 \ub54c\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume rm")," \ub4f1\uc744 \uc218\ud589\ud558\uc5ec \uc9c1\uc811 \uad00\ub9ac\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Docker volume"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Blind mount"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,a.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uac1c\ubc1c\ud560 \ub54c\ub294 bind mount \uac00 \ud3b8\ud558\uae34 \ud558\uc9c0\ub9cc, 
\ud658\uacbd\uc744 \uae54\ub054\ud558\uac8c \uc720\uc9c0\ud558\uace0 \uc2f6\ub2e4\uba74 docker volume \uc744 \uc0ac\uc6a9\ud558\uc5ec create, rm \uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\ub3c4 \ud558\ub098\uc758 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\ub3c4 \uacb0\uad6d docker \uc758 bind mount \ub97c \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-resource-limit"},"docker run with resource limit"),(0,a.kt)("p",null,"\uae30\ubcf8\uc801\uc73c\ub85c docker container \ub294 ",(0,a.kt)("strong",{parentName:"p"},"host OS \uc758 cpu, memory \uc790\uc6d0\uc744 fully \uc0ac\uc6a9"),"\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc \uc774\ub807\uac8c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 host OS \uc758 \uc790\uc6d0 \uc0c1\ud669\uc5d0 \ub530\ub77c\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OOM")," \ub4f1\uc758 \uc774\uc288\ub85c docker container \uac00 \ube44\uc815\uc0c1\uc801\uc73c\ub85c \uc885\ub8cc\ub418\ub294 \uc0c1\ud669\uc774 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7f0 \ubb38\uc81c\ub97c \ub2e4\ub8e8\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"docker container \uc2e4\ud589 \uc2dc, cpu \uc640 memory \uc758 \uc0ac\uc6a9\ub7c9 \uc81c\ud55c"),"\uc744 \uac78 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"\uc635\uc158"),"\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,a.kt)("p",null,"\uc704\uc758 \ub3c4\ucee4\ub97c \uc2e4\ud589 \ud6c4 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker stats")," \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \uc0ac\uc6a9\ub7c9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c pod \ub77c\ub294 \ub9ac\uc18c\uc2a4\uc5d0 cpu, memory \uc81c\ud55c\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,a.kt)("p",null,"\ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uac00 \uacc4\uc18d\ud574\uc11c running \uc0c1\ud0dc\ub97c \uc720\uc9c0\uc2dc\ucf1c\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4. 
\uc774\ub7f0 \uacbd\uc6b0\ub97c \uc704\ud574\uc11c \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc885\ub8cc\ub418\uc790\ub9c8\uc790 \ubc14\ub85c \uc7ac\uc0dd\uc131\uc744 \uc2dc\ub3c4\ud560 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"--restart=always")," \uc635\uc158\uc744 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc635\uc158 \uc785\ub825 \ud6c4 \ub3c4\ucee4\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps"),"\ub97c \ud1b5\ud574 \uc7ac\uc2e4\ud589\uc774 \ub418\uace0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.\n\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uace0 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 STATUS\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"on-failure with max retries"),(0,a.kt)("li",{parentName:"ul"},"always \ub4f1\uc758 \uc120\ud0dd\uc9c0 \uc81c\uacf5")))),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c job \uc774\ub77c\ub294 resource \uc758 restart \uc635\uc158\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-as-a-background-process"},"docker run as a background process"),(0,a.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud560 \ub54c\ub294 \uae30\ubcf8\uc801\uc73c\ub85c foreground process \ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud55c \ud130\ubbf8\ub110\uc774 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0 \uc790\ub3d9\uc73c\ub85c attach \ub418\uc5b4 \uc788\uc5b4, \ub2e4\ub978 \uba85\ub839\uc744 \uc2e4\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc608\uc2dc\ub97c \uc218\ud589\ud574\ubd05\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ud130\ubbf8\ub110 2 \uac1c\ub97c \uc5f4\uc5b4, \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker ps")," \ub97c \uc9c0\ucf1c\ubcf4\uace0, \ub2e4\ub978 \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc744 \ucc28\ub840\ub85c \uc2e4\ud589\ud574\ubcf4\uba70 \ub3d9\uc791\uc744 \uc9c0\ucf1c\ubd05\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"first-practice"},"First Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"10 \ucd08\ub3d9\uc548 \uba48\ucdb0 \uc788\uc5b4\uc57c \ud558\uace0, \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
10\ucd08 \ub4a4\uc5d0\ub294 docker ps \uc5d0\uc11c container \uac00 \uc885\ub8cc\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"second-practice"},"Second Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"\uc774\ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + q")),(0,a.kt)("p",null,"\ud574\ub2f9 \ud130\ubbf8\ub110\uc5d0\uc11c \uc774\uc81c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc788\uac8c \ub418\uc5c8\uc73c\uba70, docker ps \ub85c\ub3c4 10\ucd08\uae4c\uc9c0\ub294 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc0b4\uc544\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub807\uac8c docker container \ub0b4\ubd80\uc5d0\uc11c \ube60\uc838\ub098\uc628 \uc0c1\ud669\uc744 detached \ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4.\n\ub3c4\ucee4\uc5d0\uc11c\ub294 run \uc744 \uc2e4\ud589\ud568\uacfc \ub3d9\uc2dc\uc5d0 detached mode \ub85c \uc2e4\ud589\uc2dc\ud0ac \uc218 \uc788\ub294 \uc635\uc158\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"third-practice"},"Third Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,a.kt)("p",null,"detached mode \uc774\ubbc0\ub85c \ud574\ub2f9 \uba85\ub839\uc744 \uc2e4\ud589\uc2dc\ud0a8 \ud130\ubbf8\ub110\uc5d0\uc11c \ub2e4\ub978 \uc561\uc158\uc744 \uc218\ud589\uc2dc\ud0ac \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc0c1\ud669\uc5d0 \ub530\ub77c detached mode \ub97c \uc801\uc808\ud788 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4, DB \uc640 \ud1b5\uc2e0\ud558\ub294 Backend API server \ub97c \uac1c\ubc1c\ud560 \ub54c Backend API server \ub294 source code \ub97c \ubcc0\uacbd\uc2dc\ucf1c\uac00\uba74\uc11c hot-loading \uc73c\ub85c \uacc4\uc18d\ud574\uc11c \ub85c\uadf8\ub97c \ud655\uc778\ud574\ubd10\uc57c \ud558\uc9c0\ub9cc, DB \ub294 \ub85c\uadf8\ub97c \uc9c0\ucf1c\ubcfc \ud544\uc694\ub294 \uc5c6\ub294 \uacbd\uc6b0\ub77c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","DB \ub294 docker container \ub97c detached mode \ub85c \uc2e4\ud589\uc2dc\ud0a4\uace0, Backend API server \ub294 attached mode \ub85c log \ub97c following \ud558\uba74\uc11c \uc2e4\ud589\uc2dc\ud0a4\uba74 
\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}s.isMDXComponent=!0},6045:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},5940:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8425],{3905:(e,t,n)=>{n.d(t,{Zo:()=>k,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),c=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},k=function(e){var t=c(e.components);return r.createElement(p.Provider,{value:t},e.children)},d="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),d=c(n),u=a,m=d["".concat(p,".").concat(u)]||d[u]||s[u]||l;return n?r.createElement(m,o(o({ref:t},k),{},{components:n})):r.createElement(m,o({ref:t},k))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>s,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var r=n(7462),a=(n(7294),n(3905));const l={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/advanced",id:"prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced 
way.",source:"@site/docs/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/docs/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/advanced.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/docs/prerequisites/docker/images"}},p={},c=[{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30",level:2},{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Docker tag \uc774\ub984 \uc9d3\uae30",id:"docker-tag-\uc774\ub984-\uc9d3\uae30",level:3},{value:"ETC",id:"etc",level:3},{value:"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158",id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158",level:2},{value:"docker run with volume",id:"docker-run-with-volume",level:3},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"docker run as a background process",id:"docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],k={toc:c},d="wrapper";function s(e){let{components:t,...l}=e;return(0,a.kt)(d,(0,r.Z)({},k,l,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30"),(0,a.kt)("h3",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810"),(0,a.kt)("p",null,"Dockerfile \uc744 \ud65c\uc6a9\ud558\uc5ec \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c\ub294 \uba85\ub839\uc5b4\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\uc21c\uc11c"),"\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \uc5ec\ub7ec \uac1c\uc758 Read-Only Layer \ub85c \uad6c\uc131\ub418\uc5b4\uc788\uace0, \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud560 \ub54c \uc774\ubbf8 \uc874\uc7ac\ud558\ub294 \ub808\uc774\uc5b4\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uce90\uc2dc\ub418\uc5b4")," \uc7ac\uc0ac\uc6a9\ub418\uae30 \ub54c\ubb38\uc5d0, \uc774\ub97c \uc0dd\uac01\ud574\uc11c Dockerfile \uc744 \uad6c\uc131\ud55c\ub2e4\uba74 ",(0,a.kt)("strong",{parentName:"p"},"\ube4c\ub4dc \uc2dc\uac04\uc744 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"Dockerfile\uc5d0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"RUN"),", 
",(0,a.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uba85\ub839\uc5b4 \ud558\ub098\uac00 \ud558\ub098\uc758 \ub808\uc774\uc5b4\ub85c \uc800\uc7a5\ub429\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\ub85c \ube4c\ub4dc\ub41c \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash")," \uba85\ub839\uc5b4\ub85c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ub808\uc774\uc5b4\ub85c \ud45c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"layers.png",src:n(6045).Z,width:"1080",height:"612"})),(0,a.kt)("p",null,"\ucd5c\uc0c1\ub2e8\uc758 R/W Layer \ub294 \uc774\ubbf8\uc9c0\uc5d0 \uc601\ud5a5\uc744 \uc8fc\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ub0b4\uc5ed\uc740 \ubaa8\ub450 \ud718\ubc1c\uc131\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub2e8\uc758 \ub808\uc774\uc5b4\uac00 \ubcc0\uacbd\ub418\uba74, \uadf8 \uc704\uc758 \ub808\uc774\uc5b4\ub294 \ubaa8\ub450 \uc0c8\ub85c \ube4c\ub4dc\ub429\ub2c8\ub2e4. \uadf8\ub798\uc11c Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4\uc758 \uc21c\uc11c\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uba74, ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc \ubcc0\uacbd"),"\ub418\ub294 \ubd80\ubd84\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ucd5c\ub300\ud55c \ub4a4\ucabd\uc73c\ub85c")," \uc815\ub82c\ud558\ub294 \uac83\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. (ex. 
",(0,a.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,a.kt)("p",null,"\uadf8\ub807\uae30 \ub54c\ubb38\uc5d0 \ubc18\ub300\ub85c \ubcc0\uacbd\ub418\uc9c0 \uc54a\ub294 \ubd80\ubd84\uc740 \ucd5c\ub300\ud55c \uc55e\ucabd\uc73c\ub85c \uc815\ub82c\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub9cc\uc57d \uac70\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\ubcc0\uacbd\ub418\uc9c0 \uc54a\uc9c0\ub9cc"),", \uc5ec\ub7ec \uacf3\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc")," \uc4f0\uc774\ub294 \ubd80\ubd84\uc744 \uacf5\ud1b5\ud654\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud574\ub2f9 \uacf5\ud1b5\ubd80\ubd84\ub9cc \ubb36\uc5b4\uc11c \ubcc4\ub3c4\uc758 \uc774\ubbf8\uc9c0\ub294 \ubbf8\ub9ac \ub9cc\ub4e4\uc5b4\ub454 \ub2e4\uc74c, ",(0,a.kt)("strong",{parentName:"p"},"\ubca0\uc774\uc2a4 \uc774\ubbf8\uc9c0")," \ub85c \ud65c\uc6a9\ud558\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\ub978 \uac74 \uac70\uc758 \ub611\uac19\uc740\ub370, tensorflow-cpu \ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\ubbf8\uc9c0\uc640, tensorflow-gpu \ub97c \uc0ac\uc6a9\ud558\ub294 \ud658\uacbd\uc744 \ubd84\ub9ac\ud574\uc11c \uc774\ubbf8\uc9c0\ub85c \ub9cc\ub4e4\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","python \uacfc \uae30\ud0c0 \uae30\ubcf8\uc801\uc778 \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub41c ",(0,a.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,a.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," \ub97c \ub9cc\ub4e4\uc5b4\ub450\uace0, ",(0,a.kt)("strong",{parentName:"p"},"tensorflow cpu \ubc84\uc804\uacfc gpu \ubc84\uc804\uc774")," \uc124\uce58\ub41c \uc774\ubbf8\uc9c0 \uc0c8\ub85c \ub9cc\ub4e4\ub54c\ub294, \uc704\uc758 \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"FROM")," \uc73c\ub85c \ubd88\ub7ec\uc628 \ub2e4\uc74c, tensorflow install \ud558\ub294 \ubd80\ubd84\ub9cc \ubcc4\ub3c4\ub85c \uc791\uc131\ud574\uc11c Dockerfile \uc744 2 \uac1c\ub85c \uad00\ub9ac\ud55c\ub2e4\uba74 \uac00\ub3c5\uc131\ub3c4 \uc88b\uace0 \ube4c\ub4dc \uc2dc\uac04\ub3c4 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"\ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \ud569\uce58\ub294 \uac83"),"\uc774 Old version \uc758 \ub3c4\ucee4\uc5d0\uc11c\ub294 \uc131\ub2a5 \ud5a5\uc0c1 \ud6a8\uacfc\ub97c \uc774\ub04c\uc5c8\uc2b5\ub2c8\ub2e4. 
\uc5ec\ub7ec\ubd84\uc758 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 \uc5b4\ub5a4 \ub3c4\ucee4 \ubc84\uc804\uc5d0\uc11c \uc2e4\ud589\ub420 \uac83\uc778\uc9c0 \ubcf4\uc7a5\ud560 \uc218 \uc5c6\uc73c\uba70, ",(0,a.kt)("strong",{parentName:"p"},"\uac00\ub3c5\uc131"),"\uc744 \uc704\ud574\uc11c\ub3c4 \ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \uc801\uc808\ud788 \ud569\uce58\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ub41c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,a.kt)("p",null,"\uc774\ub97c \uc544\ub798\uc640 \uac19\uc774 \ud569\uccd0\uc11c \uc801\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,a.kt)("p",null,"\ud3b8\uc758\ub97c \uc704\ud574\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore")," \ub3c4 \uc0ac\uc6a9\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".gitignore")," \uc640 \ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud55c\ub2e4\uace0 \uc774\ud574\ud558\uba74 \ub429\ub2c8\ub2e4. (git add \ud560 \ub54c \uc81c\uc678\ud560 \uc218 \uc788\ub4ef\uc774, docker build \ud560 \ub54c \uc790\ub3d9\uc73c\ub85c \uc81c\uc678)"),(0,a.kt)("p",null,"\ub354 \ub9ce\uc740 \uc815\ubcf4\ub294 ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker \uacf5\uc2dd \ubb38\uc11c"),"\uc5d0\uc11c \ud655\uc778\ud558\uc2e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \ub294 \ubaa8\ub450 \ucee8\ud14c\uc774\ub108\uc758 \uc2e4\ud589 \uc2dc\uc810\uc5d0\uc11c \uc5b4\ub5a4 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\uc2dc\ud0a4\uace0 \uc2f6\uc744 \ub54c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \uc774 \ub458 \uc911 \ud558\ub098\ub294 \ubc18\ub4dc\uc2dc \uc874\uc7ac\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"\ucc28\uc774\uc810"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD"),": docker run \uc744 \uc218\ud589\ud560 \ub54c, \uc27d\uac8c \ubcc0\uacbd\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc74c"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": ",(0,a.kt)("inlineCode",{parentName:"li"},"--entrypoint")," \ub97c \uc0ac\uc6a9\ud574\uc57c \ubcc0\uacbd\ud560 \uc218 \uc788\uc74c")))),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \uac00 \ud568\uaed8 \uc4f0\uc77c \ub54c\ub294 \ubcf4\ud1b5 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc5d0\uc11c \uc801\uc740 \uba85\ub839\uc758 arguments(parameters) \ub97c 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc5d0\uc11c \uc8fc\uc11d\uc73c\ub85c \ud45c\uc2dc\ub41c \ubd80\ubd84\ub4e4\uc744 \ud574\uc81c\ud558\uba70 \ube4c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null}),(0,a.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"NO CMD")),(0,a.kt)("td",{parentName:"tr",align:null},"Error!"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,a.kt)("td",{parentName:"tr",align:null},"x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD x y")),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"In Kubernetes pod",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," \u2192 command"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD")," \u2192 args")))),(0,a.kt)("h3",{id:"docker-tag-\uc774\ub984-\uc9d3\uae30"},"Docker tag \uc774\ub984 \uc9d3\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc758 tag \ub85c ",(0,a.kt)("strong",{parentName:"p"},"latest \ub294 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5"),"\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc720\ub294 latest \ub294 default tag name \uc774\ubbc0\ub85c ",(0,a.kt)("strong",{parentName:"p"},"\uc758\ub3c4\uce58 \uc54a\uac8c overwritten")," \ub418\ub294 \uacbd\uc6b0\uac00 \ub108\ubb34 \ub9ce\uc774 \ubc1c\uc0dd\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub098\uc758 \uc774\ubbf8\uc9c0\ub294 \ud558\ub098\uc758 
\ud0dc\uadf8\ub97c \uac00\uc9d0(",(0,a.kt)("strong",{parentName:"p"},"uniqueness"),")\uc744 \ubcf4\uc7a5\ud574\uc57c \ucd94\ud6c4 Production \ub2e8\uacc4\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\ud611\uc5c5/\ub514\ubc84\uae45"),"\uc5d0 \uc6a9\uc774\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub0b4\uc6a9\uc740 \ub2e4\ub974\uc9c0\ub9cc, \ub3d9\uc77c\ud55c tag \ub97c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 \ucd94\ud6c4 dangling image \ub85c \ucde8\uae09\ub418\uc5b4 \uad00\ub9ac\ud558\uae30 \uc5b4\ub824\uc6cc\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","dangling image\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker images"),"\uc5d0\ub294 \ub098\uc624\uc9c0 \uc54a\uc9c0\ub9cc \uacc4\uc18d\ud574\uc11c \uc800\uc7a5\uc18c\ub97c \ucc28\uc9c0\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"etc"},"ETC"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"log \ub4f1\uc758 \uc815\ubcf4\ub294 container \ub0b4\ubd80\uac00 \uc544\ub2cc \uacf3\uc5d0 \ub530\ub85c \uc800\uc7a5\ud569\ub2c8\ub2e4.\ncontainer \ub0b4\ubd80\uc5d0\uc11c write \ud55c data \ub294 \uc5b8\uc81c\ub4e0\uc9c0 \uc0ac\ub77c\uc9c8 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"secret \ud55c \uc815\ubcf4, \ud658\uacbd(dev/prod) dependent \ud55c \uc815\ubcf4 \ub4f1\uc740 Dockerfile \uc5d0 \uc9c1\uc811 \uc801\ub294 \uac8c \uc544\ub2c8\ub77c, env var \ub610\ub294 .env config file \uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"Dockerfile ",(0,a.kt)("strong",{parentName:"li"},"linter")," \ub3c4 \uc874\uc7ac\ud558\ubbc0\ub85c, \ud611\uc5c5 \uc2dc\uc5d0\ub294 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,a.kt)("h2",{id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158"},"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158"),(0,a.kt)("h3",{id:"docker-run-with-volume"},"docker run with volume"),(0,a.kt)("p",null,"Docker container \uc0ac\uc6a9 \uc2dc \ubd88\ud3b8\ud55c \uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.\n\ubc14\ub85c Docker\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ubaa8\ub4e0 \uc0ac\ud56d\uc740 \uc800\uc7a5\ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc774\uc720\ub294 Docker container \ub294 \uac01\uac01 \uaca9\ub9ac\ub41c \ud30c\uc77c\uc2dc\uc2a4\ud15c\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. 
\ub530\ub77c\uc11c, ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec docker container \ub07c\ub9ac \ub370\uc774\ud130\ub97c \uacf5\uc720\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c Docker\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"2 \uac00\uc9c0"),"\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"storage.png",src:n(5940).Z,width:"501",height:"255"})),(0,a.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"docker cli \ub97c \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"li"},"volume")," \uc774\ub77c\ub294 \ub9ac\uc18c\uc2a4\ub97c \uc9c1\uc811 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"host \uc5d0\uc11c Docker area(",(0,a.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") \uc544\ub798\uc5d0 \ud2b9\uc815 \ub514\ub809\ud1a0\ub9ac\ub97c \uc0dd\uc131\ud55c \ub2e4\uc74c, \ud574\ub2f9 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"host \uc758 \ud2b9\uc815 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"how-to-use"},"How to use?"),(0,a.kt)("p",null,"\uc0ac\uc6a9 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ub3d9\uc77c\ud55c \uc778\ud130\ud398\uc774\uc2a4"),"\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"-v")," \uc635\uc158\uc744 \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc, volume \uc744 \uc0ac\uc6a9\ud560 \ub54c\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume rm")," \ub4f1\uc744 \uc218\ud589\ud558\uc5ec \uc9c1\uc811 \uad00\ub9ac\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Docker volume"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Blind mount"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,a.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uac1c\ubc1c\ud560 \ub54c\ub294 bind mount \uac00 \ud3b8\ud558\uae34 \ud558\uc9c0\ub9cc, \ud658\uacbd\uc744 \uae54\ub054\ud558\uac8c \uc720\uc9c0\ud558\uace0 \uc2f6\ub2e4\uba74 docker volume \uc744 \uc0ac\uc6a9\ud558\uc5ec create, rm \uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\ub3c4 \ud558\ub098\uc758 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\ub3c4 \uacb0\uad6d docker \uc758 bind mount \ub97c \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-resource-limit"},"docker run with resource limit"),(0,a.kt)("p",null,"\uae30\ubcf8\uc801\uc73c\ub85c docker container \ub294 ",(0,a.kt)("strong",{parentName:"p"},"host OS \uc758 cpu, memory \uc790\uc6d0\uc744 fully \uc0ac\uc6a9"),"\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc774\ub807\uac8c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 host OS \uc758 \uc790\uc6d0 \uc0c1\ud669\uc5d0 \ub530\ub77c\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OOM")," \ub4f1\uc758 \uc774\uc288\ub85c docker container \uac00 \ube44\uc815\uc0c1\uc801\uc73c\ub85c \uc885\ub8cc\ub418\ub294 \uc0c1\ud669\uc774 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7f0 \ubb38\uc81c\ub97c \ub2e4\ub8e8\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"docker container \uc2e4\ud589 \uc2dc, cpu \uc640 memory \uc758 \uc0ac\uc6a9\ub7c9 \uc81c\ud55c"),"\uc744 \uac78 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"\uc635\uc158"),"\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,a.kt)("p",null,"\uc704\uc758 \ub3c4\ucee4\ub97c \uc2e4\ud589 \ud6c4 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker stats")," \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \uc0ac\uc6a9\ub7c9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c pod \ub77c\ub294 \ub9ac\uc18c\uc2a4\uc5d0 cpu, memory \uc81c\ud55c\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,a.kt)("p",null,"\ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uac00 \uacc4\uc18d\ud574\uc11c running \uc0c1\ud0dc\ub97c \uc720\uc9c0\uc2dc\ucf1c\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4. 
\uc774\ub7f0 \uacbd\uc6b0\ub97c \uc704\ud574\uc11c \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc885\ub8cc\ub418\uc790\ub9c8\uc790 \ubc14\ub85c \uc7ac\uc0dd\uc131\uc744 \uc2dc\ub3c4\ud560 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"--restart=always")," \uc635\uc158\uc744 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc635\uc158 \uc785\ub825 \ud6c4 \ub3c4\ucee4\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps"),"\ub97c \ud1b5\ud574 \uc7ac\uc2e4\ud589\uc774 \ub418\uace0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.\n\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uace0 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 STATUS\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"on-failure with max retries"),(0,a.kt)("li",{parentName:"ul"},"always \ub4f1\uc758 \uc120\ud0dd\uc9c0 \uc81c\uacf5")))),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c job \uc774\ub77c\ub294 resource \uc758 restart \uc635\uc158\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-as-a-background-process"},"docker run as a background process"),(0,a.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud560 \ub54c\ub294 \uae30\ubcf8\uc801\uc73c\ub85c foreground process \ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud55c \ud130\ubbf8\ub110\uc774 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0 \uc790\ub3d9\uc73c\ub85c attach \ub418\uc5b4 \uc788\uc5b4, \ub2e4\ub978 \uba85\ub839\uc744 \uc2e4\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc608\uc2dc\ub97c \uc218\ud589\ud574\ubd05\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ud130\ubbf8\ub110 2 \uac1c\ub97c \uc5f4\uc5b4, \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker ps")," \ub97c \uc9c0\ucf1c\ubcf4\uace0, \ub2e4\ub978 \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc744 \ucc28\ub840\ub85c \uc2e4\ud589\ud574\ubcf4\uba70 \ub3d9\uc791\uc744 \uc9c0\ucf1c\ubd05\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"first-practice"},"First Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"10 \ucd08\ub3d9\uc548 \uba48\ucdb0 \uc788\uc5b4\uc57c \ud558\uace0, \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
10\ucd08 \ub4a4\uc5d0\ub294 docker ps \uc5d0\uc11c container \uac00 \uc885\ub8cc\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"second-practice"},"Second Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"\uc774\ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + q")),(0,a.kt)("p",null,"\ud574\ub2f9 \ud130\ubbf8\ub110\uc5d0\uc11c \uc774\uc81c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc788\uac8c \ub418\uc5c8\uc73c\uba70, docker ps \ub85c\ub3c4 10\ucd08\uae4c\uc9c0\ub294 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc0b4\uc544\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub807\uac8c docker container \ub0b4\ubd80\uc5d0\uc11c \ube60\uc838\ub098\uc628 \uc0c1\ud669\uc744 detached \ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4.\n\ub3c4\ucee4\uc5d0\uc11c\ub294 run \uc744 \uc2e4\ud589\ud568\uacfc \ub3d9\uc2dc\uc5d0 detached mode \ub85c \uc2e4\ud589\uc2dc\ud0ac \uc218 \uc788\ub294 \uc635\uc158\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"third-practice"},"Third Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,a.kt)("p",null,"detached mode \uc774\ubbc0\ub85c \ud574\ub2f9 \uba85\ub839\uc744 \uc2e4\ud589\uc2dc\ud0a8 \ud130\ubbf8\ub110\uc5d0\uc11c \ub2e4\ub978 \uc561\uc158\uc744 \uc218\ud589\uc2dc\ud0ac \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc0c1\ud669\uc5d0 \ub530\ub77c detached mode \ub97c \uc801\uc808\ud788 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4, DB \uc640 \ud1b5\uc2e0\ud558\ub294 Backend API server \ub97c \uac1c\ubc1c\ud560 \ub54c Backend API server \ub294 source code \ub97c \ubcc0\uacbd\uc2dc\ucf1c\uac00\uba74\uc11c hot-loading \uc73c\ub85c \uacc4\uc18d\ud574\uc11c \ub85c\uadf8\ub97c \ud655\uc778\ud574\ubd10\uc57c \ud558\uc9c0\ub9cc, DB \ub294 \ub85c\uadf8\ub97c \uc9c0\ucf1c\ubcfc \ud544\uc694\ub294 \uc5c6\ub294 \uacbd\uc6b0\ub77c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","DB \ub294 docker container \ub97c detached mode \ub85c \uc2e4\ud589\uc2dc\ud0a4\uace0, Backend API server \ub294 attached mode \ub85c log \ub97c following \ud558\uba74\uc11c \uc2e4\ud589\uc2dc\ud0a4\uba74 
\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}s.isMDXComponent=!0},6045:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},5940:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file diff --git a/assets/js/91e4f63c.2b363c62.js b/assets/js/91e4f63c.78febf18.js similarity index 99% rename from assets/js/91e4f63c.2b363c62.js rename to assets/js/91e4f63c.78febf18.js index 19bbdb81..57ad6479 100644 --- a/assets/js/91e4f63c.2b363c62.js +++ b/assets/js/91e4f63c.78febf18.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[822],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var p=a.createContext({}),s=function(e){var n=a.useContext(p),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},m=function(e){var n=s(e.components);return a.createElement(p.Provider,{value:n},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),d=s(t),c=r,k=d["".concat(p,".").concat(c)]||d[c]||u[c]||l;return t?a.createElement(k,o(o({ref:n},m),{},{components:t})):a.createElement(k,o({ref:n},m))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=t.length,o=new Array(l);o[0]=c;var i={};for(var p in n)hasOwnProperty.call(n,p)&&(i[p]=n[p]);i.originalType=e,i[d]="string"==typeof e?e:r,o[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const l={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"version-1.0/api-deployment/seldon-fields",title:"4. 
Seldon Fields",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/docs/1.0/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-fields.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/docs/1.0/api-deployment/seldon-pg"},next:{title:"5. Model from MLflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow"}},p={},s=[{value:"How Seldon Core works?",id:"how-seldon-core-works",level:2},{value:"SeldonDeployment Spec",id:"seldondeployment-spec",level:2},{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"initContainer",id:"initcontainer",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:s},d="wrapper";function u(e){let{components:n,...l}=e;return(0,r.kt)(d,(0,a.Z)({},m,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"how-seldon-core-works"},"How Seldon Core works?"),(0,r.kt)("p",null,"Seldon Core\uac00 API \uc11c\ubc84\ub97c \uc0dd\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc694\uc57d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-fields-0.png",src:t(658).Z,width:"2784",height:"1000"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"initContainer\ub294 \ubaa8\ub378 \uc800\uc7a5\uc18c\uc5d0\uc11c \ud544\uc694\ud55c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 container\ub85c \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"container\ub294 \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \uac10\uc2fc API \uc11c\ubc84\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\uc0dd\uc131\ub41c API \uc11c\ubc84 \uc8fc\uc18c\ub85c API\ub97c \uc694\uccad\ud558\uc5ec \ubaa8\ub378\uc758 \ucd94\ub860 \uac12\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"seldondeployment-spec"},"SeldonDeployment Spec"),(0,r.kt)("p",null,"Seldon Core\ub97c \uc0ac\uc6a9\ud560 \ub54c, \uc8fc\ub85c \uc0ac\uc6a9\ud558\uac8c \ub418\ub294 \ucee4\uc2a4\ud140 \ub9ac\uc18c\uc2a4\uc778 SeldonDeployment\ub97c \uc815\uc758\ud558\ub294 yaml \ud30c\uc77c\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: 
model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,r.kt)("p",null,"SeldonDeployment spec \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"name")," \uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"predictors")," \ud544\ub4dc\ub294 required \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"predictors"),"\ub294 \ud55c \uac1c\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph")," \uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c\ub3c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph"),"\uc5d0\uc11c \uc815\uc758\ud574\uc57c \ud560 \ud544\ub4dc\ub4e4\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \ub294 \ud558\ub098\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \ud0a4\uac12\uc774 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \uc5d0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"initContainers"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"containers")," \uc758 \ud544\ub4dc\uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"volumes"},"volumes"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),"\uc740 initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uae30 \uc704\ud55c \uacf5\uac04\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uc785\ub825\uc744 \ubc1b\uc73c\uba70 array\uc758 \uad6c\uc131 \uc694\uc18c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"emptyDir")," \uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\ub4e4\uc740 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uace0 \uc62e\uae38 \ub54c \ud55c\ubc88 \uc0ac\uc6a9\ub418\ubbc0\ub85c \ud06c\uac8c \uc218\uc815\ud558\uc9c0 \uc54a\uc544\ub3c4 \ub429\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"initcontainer"},"initContainer"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,r.kt)("p",null,"initContainer\ub294 API\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ub418\ub294 \ud544\ub4dc\ub4e4\uc740 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub85c\ubd80\ud130 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ub54c \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \uc815\ud574\uc90d\ub2c8\ub2e4."),(0,r.kt)("p",null,"initContainer\uc758 \uac12\uc740 n\uac1c\uc758 array\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc73c\uba70 \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\ub9c8\ub2e4 \uac01\uac01 \uc9c0\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"name"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub514\ubc84\uae45\uc744 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," \ub85c \uc0ac\uc6a9\ud558\uae38 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 
\ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uae30 \uc704\ud574 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","seldon core\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \uc774\ubbf8\uc9c0\ub294 \ud06c\uac8c \ub450 \uac00\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,r.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,r.kt)("p",null,"\uac01\uac01\uc758 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uc744 \ucc38\uace0 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c\ub294 kfserving\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"args"},"args"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,r.kt)("p",null,"gcr.io/kfserving/storage-initializer:v0.4.0 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uac00 \uc2e4\ud589(",(0,r.kt)("inlineCode",{parentName:"p"},"run"),")\ub420 \ub54c \uc785\ub825\ubc1b\ub294 argument\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uad6c\uc131\ub418\uba70 \uccab \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub450 \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4. 
(seldon core\uc5d0\uc11c\ub294 \uc8fc\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.)"),(0,r.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumneMounts"),"\ub294 volumes\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d \ubcfc\ub968\uc744 \ubd99\uc5ec\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 Volume"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"container"},"container"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"container\ub294 \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 API \ud615\uc2dd\uc73c\ub85c \uc2e4\ud589\ub420 \ub54c\uc758 \uc124\uc815\uc744 \uc815\uc758\ud558\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4. "),(0,r.kt)("h4",{id:"name-1"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\uc758 \uc774\ub984\uc744 \uc801\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image-1"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \ub370 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ubbf8\uc9c0\uc5d0\ub294 \ubaa8\ub378\uc774 \ub85c\ub4dc\ub420 \ub54c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uc774 \ubaa8\ub450 \uc124\uce58\ub418\uc5b4 \uc788\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Seldon Core\uc5d0\uc11c \uc9c0\uc6d0\ud558\ub294 \uacf5\uc2dd \uc774\ubbf8\uc9c0\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,r.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,r.kt)("p",null,"initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ub370\uc774\ud130\uac00 \uc788\ub294 \uacbd\ub85c\ub97c \uc54c\ub824\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub54c \ubaa8\ub378\uc774 \uc218\uc815\ub418\ub294 \uac83\uc744 \ubc29\uc9c0\ud558\uae30 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"readOnly: true"),"\ub3c4 \uac19\uc774 
\uc8fc\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"securitycontext"},"securityContext"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"\ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud560 \ub54c pod\uc774 \uad8c\ud55c\uc774 \uc5c6\uc5b4\uc11c \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc218\ud589\ud558\uc9c0 \ubabb\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574\uc11c root \uad8c\ud55c\uc744 \ubd80\uc5ec\ud569\ub2c8\ub2e4. (\ub2e4\ub9cc \uc774 \uc791\uc5c5\uc740 \uc2e4\uc81c \uc11c\ube59 \uc2dc \ubcf4\uc548 \ubb38\uc81c\uac00 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"graph"},"graph"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,r.kt)("p",null,"\ubaa8\ub378\uc774 \ub3d9\uc791\ud558\ub294 \uc21c\uc11c\ub97c \uc815\uc758\ud55c \ud544\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"name-2"},"name"),(0,r.kt)("p",null,"\ubaa8\ub378 \uadf8\ub798\ud504\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. container\uc5d0\uc11c \uc815\uc758\ub41c \uc774\ub984\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"type"},"type"),(0,r.kt)("p",null,"type\uc740 \ud06c\uac8c 4\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"MODEL"),(0,r.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"ROUTER")),(0,r.kt)("p",null,"\uac01 type\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"parameters"},"parameters"),(0,r.kt)("p",null,"class init \uc5d0\uc11c \uc0ac\uc6a9\ub418\ub294 \uac12\ub4e4\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","sklearnserver\uc5d0\uc11c \ud544\uc694\ud55c \uac12\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"\ub2e4\uc74c \ud30c\uc77c"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,r.kt)("p",null,"\ucf54\ub4dc\ub97c \ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"model_uri"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"method"),"\ub97c \uc815\uc758\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"children"},"children"),(0,r.kt)("p",null,"\uc21c\uc11c\ub3c4\ub97c \uc791\uc131\ud560 \ub54c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. 
\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},658:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/seldon-fields-0-7794367220b87e1aba920b6aad6f9bf8.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[822],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var p=a.createContext({}),s=function(e){var n=a.useContext(p),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},m=function(e){var n=s(e.components);return a.createElement(p.Provider,{value:n},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),d=s(t),c=r,k=d["".concat(p,".").concat(c)]||d[c]||u[c]||l;return t?a.createElement(k,o(o({ref:n},m),{},{components:t})):a.createElement(k,o({ref:n},m))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=t.length,o=new Array(l);o[0]=c;var i={};for(var p in n)hasOwnProperty.call(n,p)&&(i[p]=n[p]);i.originalType=e,i[d]="string"==typeof e?e:r,o[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>p,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const l={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"version-1.0/api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/versioned_docs/version-1.0/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/docs/1.0/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-fields.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/docs/1.0/api-deployment/seldon-pg"},next:{title:"5. 
Model from MLflow",permalink:"/docs/1.0/api-deployment/seldon-mlflow"}},p={},s=[{value:"How Seldon Core works?",id:"how-seldon-core-works",level:2},{value:"SeldonDeployment Spec",id:"seldondeployment-spec",level:2},{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"initContainer",id:"initcontainer",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:s},d="wrapper";function u(e){let{components:n,...l}=e;return(0,r.kt)(d,(0,a.Z)({},m,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"how-seldon-core-works"},"How Seldon Core works?"),(0,r.kt)("p",null,"Seldon Core\uac00 API \uc11c\ubc84\ub97c \uc0dd\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc694\uc57d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-fields-0.png",src:t(658).Z,width:"2784",height:"1000"})),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"initContainer\ub294 \ubaa8\ub378 \uc800\uc7a5\uc18c\uc5d0\uc11c \ud544\uc694\ud55c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 container\ub85c \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"container\ub294 \uc804\ub2ec\ubc1b\uc740 \ubaa8\ub378\uc744 \uac10\uc2fc API \uc11c\ubc84\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ol"},"\uc0dd\uc131\ub41c API \uc11c\ubc84 \uc8fc\uc18c\ub85c API\ub97c \uc694\uccad\ud558\uc5ec \ubaa8\ub378\uc758 \ucd94\ub860 \uac12\uc744 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,r.kt)("h2",{id:"seldondeployment-spec"},"SeldonDeployment Spec"),(0,r.kt)("p",null,"Seldon Core\ub97c \uc0ac\uc6a9\ud560 \ub54c, \uc8fc\ub85c \uc0ac\uc6a9\ud558\uac8c \ub418\ub294 \ucee4\uc2a4\ud140 \ub9ac\uc18c\uc2a4\uc778 SeldonDeployment\ub97c \uc815\uc758\ud558\ub294 yaml \ud30c\uc77c\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,r.kt)("p",null,"SeldonDeployment spec \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"name")," 
\uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"predictors")," \ud544\ub4dc\ub294 required \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"predictors"),"\ub294 \ud55c \uac1c\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph")," \uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c\ub3c4 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 pod\uc758 \uad6c\ubd84\uc744 \uc704\ud55c \uc774\ub984\uc73c\ub85c \ud06c\uac8c \uc601\ud5a5\uc744 \ubbf8\uce58\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. "),(0,r.kt)("p",null,"\uc774\uc81c ",(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"graph"),"\uc5d0\uc11c \uc815\uc758\ud574\uc57c \ud560 \ud544\ub4dc\ub4e4\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"componentSpecs")," \ub294 \ud558\ub098\ub85c \uad6c\uc131\ub41c array\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \ud0a4\uac12\uc774 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},"spec")," \uc5d0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"initContainers"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"containers")," \uc758 \ud544\ub4dc\uac00 \uc815\uc758\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"volumes"},"volumes"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumes"),"\uc740 initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uae30 \uc704\ud55c \uacf5\uac04\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uc785\ub825\uc744 \ubc1b\uc73c\uba70 array\uc758 \uad6c\uc131 \uc694\uc18c\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uacfc ",(0,r.kt)("inlineCode",{parentName:"p"},"emptyDir")," \uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\ub4e4\uc740 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uace0 \uc62e\uae38 \ub54c \ud55c\ubc88 \uc0ac\uc6a9\ub418\ubbc0\ub85c \ud06c\uac8c \uc218\uc815\ud558\uc9c0 \uc54a\uc544\ub3c4 \ub429\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"initcontainer"},"initContainer"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,r.kt)("p",null,"initContainer\ub294 API\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\ub294 \uc5ed\ud560\uc744 
\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c \uc0ac\uc6a9\ub418\ub294 \ud544\ub4dc\ub4e4\uc740 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub85c\ubd80\ud130 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ub54c \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \uc815\ud574\uc90d\ub2c8\ub2e4."),(0,r.kt)("p",null,"initContainer\uc758 \uac12\uc740 n\uac1c\uc758 array\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc73c\uba70 \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\ub9c8\ub2e4 \uac01\uac01 \uc9c0\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"name"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub514\ubc84\uae45\uc744 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," \ub85c \uc0ac\uc6a9\ud558\uae38 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uae30 \uc704\ud574 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0 \uc774\ub984\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","seldon core\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \uc774\ubbf8\uc9c0\ub294 \ud06c\uac8c \ub450 \uac00\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,r.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,r.kt)("p",null,"\uac01\uac01\uc758 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uc744 \ucc38\uace0 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,r.kt)("p",null,(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c\ub294 kfserving\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"args"},"args"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,r.kt)("p",null,"gcr.io/kfserving/storage-initializer:v0.4.0 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uac00 \uc2e4\ud589(",(0,r.kt)("inlineCode",{parentName:"p"},"run"),")\ub420 \ub54c \uc785\ub825\ubc1b\ub294 argument\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","array\ub85c \uad6c\uc131\ub418\uba70 \uccab \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub450 \ubc88\uc9f8 array\uc758 \uac12\uc740 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc8fc\uc18c\ub97c \uc801\uc2b5\ub2c8\ub2e4. 
(seldon core\uc5d0\uc11c\ub294 \uc8fc\ub85c ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.)"),(0,r.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"volumneMounts"),"\ub294 volumes\uc5d0\uc11c \uc124\uba85\ud55c \uac83\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"/mnt/models"),"\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc5d0\uc11c \uacf5\uc720\ud560 \uc218 \uc788\ub3c4\ub85d \ubcfc\ub968\uc744 \ubd99\uc5ec\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 Volume"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"container"},"container"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"container\ub294 \uc2e4\uc81c\ub85c \ubaa8\ub378\uc774 API \ud615\uc2dd\uc73c\ub85c \uc2e4\ud589\ub420 \ub54c\uc758 \uc124\uc815\uc744 \uc815\uc758\ud558\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4. "),(0,r.kt)("h4",{id:"name-1"},"name"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"name"),"\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0c1\uc758 pod\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. \uc0ac\uc6a9\ud558\ub294 \ubaa8\ub378\uc758 \uc774\ub984\uc744 \uc801\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"image-1"},"image"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"image")," \ub294 \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \ub370 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ubbf8\uc9c0\uc5d0\ub294 \ubaa8\ub378\uc774 \ub85c\ub4dc\ub420 \ub54c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uc774 \ubaa8\ub450 \uc124\uce58\ub418\uc5b4 \uc788\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"Seldon Core\uc5d0\uc11c \uc9c0\uc6d0\ud558\ub294 \uacf5\uc2dd \uc774\ubbf8\uc9c0\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,r.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,r.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,r.kt)("p",null,"initContainer\uc5d0\uc11c \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc740 \ub370\uc774\ud130\uac00 \uc788\ub294 \uacbd\ub85c\ub97c \uc54c\ub824\uc8fc\ub294 \ud544\ub4dc\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub54c \ubaa8\ub378\uc774 \uc218\uc815\ub418\ub294 \uac83\uc744 \ubc29\uc9c0\ud558\uae30 \uc704\ud574 ",(0,r.kt)("inlineCode",{parentName:"p"},"readOnly: true"),"\ub3c4 \uac19\uc774 
\uc8fc\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h4",{id:"securitycontext"},"securityContext"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,r.kt)("p",null,"\ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud560 \ub54c pod\uc774 \uad8c\ud55c\uc774 \uc5c6\uc5b4\uc11c \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc218\ud589\ud558\uc9c0 \ubabb\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574\uc11c root \uad8c\ud55c\uc744 \ubd80\uc5ec\ud569\ub2c8\ub2e4. (\ub2e4\ub9cc \uc774 \uc791\uc5c5\uc740 \uc2e4\uc81c \uc11c\ube59 \uc2dc \ubcf4\uc548 \ubb38\uc81c\uac00 \uc0dd\uae38 \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"graph"},"graph"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,r.kt)("p",null,"\ubaa8\ub378\uc774 \ub3d9\uc791\ud558\ub294 \uc21c\uc11c\ub97c \uc815\uc758\ud55c \ud544\ub4dc\uc785\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"name-2"},"name"),(0,r.kt)("p",null,"\ubaa8\ub378 \uadf8\ub798\ud504\uc758 \uc774\ub984\uc785\ub2c8\ub2e4. container\uc5d0\uc11c \uc815\uc758\ub41c \uc774\ub984\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"type"},"type"),(0,r.kt)("p",null,"type\uc740 \ud06c\uac8c 4\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"MODEL"),(0,r.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,r.kt)("li",{parentName:"ol"},"ROUTER")),(0,r.kt)("p",null,"\uac01 type\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"\uc744 \ucc38\uc870 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"parameters"},"parameters"),(0,r.kt)("p",null,"class init \uc5d0\uc11c \uc0ac\uc6a9\ub418\ub294 \uac12\ub4e4\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","sklearnserver\uc5d0\uc11c \ud544\uc694\ud55c \uac12\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"\ub2e4\uc74c \ud30c\uc77c"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,r.kt)("p",null,"\ucf54\ub4dc\ub97c \ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"model_uri"),"\uc640 ",(0,r.kt)("inlineCode",{parentName:"p"},"method"),"\ub97c \uc815\uc758\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"children"},"children"),(0,r.kt)("p",null,"\uc21c\uc11c\ub3c4\ub97c \uc791\uc131\ud560 \ub54c \uc0ac\uc6a9\ub429\ub2c8\ub2e4. 
\uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \ub2e4\uc74c \ud398\uc774\uc9c0\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."))}u.isMDXComponent=!0},658:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/seldon-fields-0-7794367220b87e1aba920b6aad6f9bf8.png"}}]); \ No newline at end of file diff --git a/assets/js/92059b0a.f442418e.js b/assets/js/92059b0a.e3c8943c.js similarity index 99% rename from assets/js/92059b0a.f442418e.js rename to assets/js/92059b0a.e3c8943c.js index a15030a2..65c3770e 100644 --- a/assets/js/92059b0a.f442418e.js +++ b/assets/js/92059b0a.e3c8943c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5658],{3905:(n,e,r)=>{r.d(e,{Zo:()=>_,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function a(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function u(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):u(u({},e),n)),r},_=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},o="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,a=n.originalType,s=n.parentName,_=i(n,["components","mdxType","originalType","parentName"]),o=m(r),d=p,b=o["".concat(s,".").concat(d)]||o[d]||l[d]||a;return r?t.createElement(b,u(u({ref:e},_),{},{components:r})):t.createElement(b,u({ref:e},_))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var a=r.length,u=new Array(a);u[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[o]="string"==typeof n?n:p,u[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>a,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const a={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/docs/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/docs/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/docs/kubeflow/advanced-environment"},next:{title:"11. 
Pipeline - Run Result",permalink:"/docs/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],_={toc:m},o="wrapper";function l(n){let{components:e,...a}=n;return(0,p.kt)(o,(0,t.Z)({},_,a,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc124\uc815\ud560 \uc218 \uc788\ub294 \uac12\ub4e4\uc5d0 \ub300\ud574 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"display-name"},"Display Name"),(0,p.kt)("p",null,"\uc0dd\uc131\ub41c \ud30c\uc774\ud504\ub77c\uc778 \ub0b4\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub450 \uac1c\uc758 \uc774\ub984\uc744 \uac16\uc2b5\ub2c8\ub2e4."),(0,p.kt)("ul",null,(0,p.kt)("li",{parentName:"ul"},"task_name: \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud560 \ub54c \uc791\uc131\ud55c \ud568\uc218 \uc774\ub984"),(0,p.kt)("li",{parentName:"ul"},"display_name: kubeflow UI\uc0c1\uc5d0 \ubcf4\uc774\ub294 \uc774\ub984")),(0,p.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uacbd\uc6b0 \ub450 \ucef4\ud3ec\ub10c\ud2b8 \ubaa8\ub450 Print and return number\ub85c \uc124\uc815\ub418\uc5b4 \uc788\uc5b4\uc11c \uc5b4\ub5a4 \ucef4\ud3ec\ub10c\ud2b8\uac00 1\ubc88\uc778\uc9c0 2\ubc88\uc778\uc9c0 \ud655\uc778\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"run-7",src:r(1816).Z,width:"3408",height:"2156"})),(0,p.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,p.kt)("p",null,"\uc774\ub97c \uc704\ud55c \uac83\uc774 \ubc14\ub85c display_name \uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc124\uc815\ud558\ub294 \ubc29\ubc95\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_display_name")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute"),"\ub97c \uc774\uc6a9\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 
",(0,p.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", 
{"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n 
metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n 
argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc774 \uc804\uc758 \ud30c\uc77c\uacfc \ube44\uad50\ud558\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"pipelines.kubeflow.org/task_display_name")," key\uac00 \uc0c8\ub85c \uc0dd\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,p.kt)("p",null,"\uc704\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc774\uc804\uc5d0 \uc0dd\uc131\ud55c ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc758 \ubc84\uc804\uc744 \uc62c\ub9ac\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"adv-pipeline-0.png",src:r(9072).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub7ec\uba74 \uc704\uc640 \uac19\uc774 \uc124\uc815\ud55c \uc774\ub984\uc774 \ub178\ucd9c\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"resources"},"Resources"),(0,p.kt)("h3",{id:"gpu"},"GPU"),(0,p.kt)("p",null,"\ud2b9\ubcc4\ud55c \uc124\uc815\uc774 \uc5c6\ub2e4\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud30c\ub4dc(pod)\ub85c \uc2e4\ud589\ud560 \ub54c, \uae30\ubcf8 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc73c\ub85c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d GPU\ub97c \uc0ac\uc6a9\ud574 \ubaa8\ub378\uc744 \ud559\uc2b5\ud574\uc57c \ud560 \ub54c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc0c1\uc5d0\uc11c GPU\ub97c \ud560\ub2f9\ubc1b\uc9c0 \ubabb\ud574 \uc81c\ub300\ub85c \ud559\uc2b5\uc774 \uc774\ub8e8\uc5b4\uc9c0\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n 
print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc0dd\uc131\ub41c \ud30c\uc77c\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),"\ub97c \uc790\uc138\ud788 \ubcf4\uba74 resources\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," \ub3c4 \ucd94\uac00\ub41c \uac83\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \ud1b5\ud574 GPU\ub97c \ud560\ub2f9\ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,p.kt)("h3",{id:"cpu"},"CPU"),(0,p.kt)("p",null,"cpu\uc758 \uac1c\uc218\ub97c \uc815\ud558\uae30 \uc704\ud574\uc11c \uc774\uc6a9\ud558\ub294 \ud568\uc218\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","gpu\uc640\ub294 \ub2e4\ub978 \uc810\uc740 int\uac00 \uc544\ub2cc string\uc73c\ub85c \uc785\ub825\ud574\uc57c \ud55c\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n 
).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,p.kt)("h3",{id:"memory"},"Memory"),(0,p.kt)("p",null,"\uba54\ubaa8\ub9ac\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},9072:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},1816:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5658],{3905:(n,e,r)=>{r.d(e,{Zo:()=>_,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function a(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function u(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):u(u({},e),n)),r},_=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},o="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var 
r=n.components,p=n.mdxType,a=n.originalType,s=n.parentName,_=i(n,["components","mdxType","originalType","parentName"]),o=m(r),d=p,b=o["".concat(s,".").concat(d)]||o[d]||l[d]||a;return r?t.createElement(b,u(u({ref:e},_),{},{components:r})):t.createElement(b,u({ref:e},_))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var a=r.length,u=new Array(a);u[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[o]="string"==typeof n?n:p,u[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>a,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const a={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/docs/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/docs/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/docs/kubeflow/advanced-environment"},next:{title:"11. Pipeline - Run Result",permalink:"/docs/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],_={toc:m},o="wrapper";function l(n){let{components:e,...a}=n;return(0,p.kt)(o,(0,t.Z)({},_,a,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc124\uc815\ud560 \uc218 \uc788\ub294 \uac12\ub4e4\uc5d0 \ub300\ud574 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"display-name"},"Display Name"),(0,p.kt)("p",null,"\uc0dd\uc131\ub41c \ud30c\uc774\ud504\ub77c\uc778 \ub0b4\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub450 \uac1c\uc758 \uc774\ub984\uc744 \uac16\uc2b5\ub2c8\ub2e4."),(0,p.kt)("ul",null,(0,p.kt)("li",{parentName:"ul"},"task_name: \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud560 \ub54c \uc791\uc131\ud55c \ud568\uc218 \uc774\ub984"),(0,p.kt)("li",{parentName:"ul"},"display_name: kubeflow UI\uc0c1\uc5d0 \ubcf4\uc774\ub294 \uc774\ub984")),(0,p.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 \uacbd\uc6b0 \ub450 \ucef4\ud3ec\ub10c\ud2b8 \ubaa8\ub450 Print and return number\ub85c \uc124\uc815\ub418\uc5b4 \uc788\uc5b4\uc11c \uc5b4\ub5a4 \ucef4\ud3ec\ub10c\ud2b8\uac00 1\ubc88\uc778\uc9c0 2\ubc88\uc778\uc9c0 \ud655\uc778\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"run-7",src:r(1816).Z,width:"3408",height:"2156"})),(0,p.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,p.kt)("p",null,"\uc774\ub97c \uc704\ud55c \uac83\uc774 \ubc14\ub85c display_name 
\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc124\uc815\ud558\ub294 \ubc29\ubc95\uc740 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_display_name")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute"),"\ub97c \uc774\uc6a9\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - 
|\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', 
\'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n 
pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc774 \uc804\uc758 \ud30c\uc77c\uacfc \ube44\uad50\ud558\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"pipelines.kubeflow.org/task_display_name")," key\uac00 \uc0c8\ub85c \uc0dd\uc131\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,p.kt)("p",null,"\uc704\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc77c\uc744 \uc774\uc6a9\ud574 \uc774\uc804\uc5d0 \uc0dd\uc131\ud55c ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc758 \ubc84\uc804\uc744 \uc62c\ub9ac\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"adv-pipeline-0.png",src:r(9072).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub7ec\uba74 \uc704\uc640 \uac19\uc774 \uc124\uc815\ud55c \uc774\ub984\uc774 \ub178\ucd9c\ub418\ub294 \uac83\uc744 
\ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"resources"},"Resources"),(0,p.kt)("h3",{id:"gpu"},"GPU"),(0,p.kt)("p",null,"\ud2b9\ubcc4\ud55c \uc124\uc815\uc774 \uc5c6\ub2e4\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud30c\ub4dc(pod)\ub85c \uc2e4\ud589\ud560 \ub54c, \uae30\ubcf8 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc73c\ub85c \uc2e4\ud589\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d GPU\ub97c \uc0ac\uc6a9\ud574 \ubaa8\ub378\uc744 \ud559\uc2b5\ud574\uc57c \ud560 \ub54c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc0c1\uc5d0\uc11c GPU\ub97c \ud560\ub2f9\ubc1b\uc9c0 \ubabb\ud574 \uc81c\ub300\ub85c \ud559\uc2b5\uc774 \uc774\ub8e8\uc5b4\uc9c0\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \uc704\ud574 ",(0,p.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc0dd\uc131\ub41c \ud30c\uc77c\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),"\ub97c \uc790\uc138\ud788 \ubcf4\uba74 resources\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," \ub3c4 \ucd94\uac00\ub41c \uac83\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub97c \ud1b5\ud574 GPU\ub97c \ud560\ub2f9\ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = 
sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,p.kt)("h3",{id:"cpu"},"CPU"),(0,p.kt)("p",null,"cpu\uc758 \uac1c\uc218\ub97c \uc815\ud558\uae30 \uc704\ud574\uc11c \uc774\uc6a9\ud558\ub294 \ud568\uc218\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","gpu\uc640\ub294 \ub2e4\ub978 \uc810\uc740 int\uac00 \uc544\ub2cc string\uc73c\ub85c \uc785\ub825\ud574\uc57c \ud55c\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,p.kt)("h3",{id:"memory"},"Memory"),(0,p.kt)("p",null,"\uba54\ubaa8\ub9ac\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,p.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"\uc744 \uc774\uc6a9\ud574 \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,p.kt)("p",null,"\ubc14\ub010 \ubd80\ubd84\ub9cc \ud655\uc778\ud558\uba74 
\ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},9072:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},1816:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file diff --git a/assets/js/97a152fd.bc56892d.js b/assets/js/97a152fd.ffaf15c3.js similarity index 99% rename from assets/js/97a152fd.bc56892d.js rename to assets/js/97a152fd.ffaf15c3.js index 9d254613..df54c899 100644 --- a/assets/js/97a152fd.bc56892d.js +++ b/assets/js/97a152fd.ffaf15c3.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6012],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,u=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,m=c["".concat(u,".").concat(d)]||c[d]||k[d]||s;return n?r.createElement(m,l(l({ref:t},p),{},{components:n})):r.createElement(m,l({ref:t},p))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=d;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>k,frontMatter:()=>s,metadata:()=>i,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"4.1. 
K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"k3s \uc5d0\uc11c\ub294 \uae30\ubcf8\uac12\uc73c\ub85c containerd\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc774\uc6a9\ud574 \uc124\uce58\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc800\ud76c\ub294 GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c docker\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc0ac\uc6a9\ud574\uc57c \ud558\ubbc0\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"--docker")," \uc635\uc158\uc744 \ud1b5\ud574 \ubc31\uc5d4\ub4dc\ub97c docker\ub85c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,a.kt)("p",null,"k3s\ub97c \uc124\uce58 \ud6c4 k3s config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud56d\ubaa9\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\ubcf4\uc548 \ubb38\uc81c\uc640 \uad00\ub828\ub41c \ud0a4\ub4e4\uc740 <...>\ub85c \uac00\ub838\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- 
context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,a.kt)("p",null,"k3s config\ub97c \ud074\ub7ec\uc2a4\ud130\uc758 kubeconfig\ub85c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c \ubcf5\uc0ac\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,a.kt)("p",null,"\ubcf5\uc0ac\ub41c config \ud30c\uc77c\uc5d0 user\uac00 \uc811\uadfc\ud560 \uc218 \uc788\ub294 \uad8c\ud55c\uc744 \uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,a.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,a.kt)("p",null,"\uc774\uc81c \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc124\uc815\ud55c kubeconfig\ub97c \ub85c\uceec\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4.\n\ub85c\uceec\uc5d0\uc11c\ub294 \uacbd\ub85c\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"~/.kube/config"),"\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucc98\uc74c \ubcf5\uc0ac\ud55c config \ud30c\uc77c\uc5d0\ub294 server ip\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443")," \uc73c\ub85c \ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\uc744 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\uac8c \uc218\uc815\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\ucdb0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," \uc73c\ub85c \uc218\uc815\ud588\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,a.kt)("h2",{id:"6-references"},"6. References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6012],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,u=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,m=c["".concat(u,".").concat(d)]||c[d]||k[d]||s;return n?r.createElement(m,l(l({ref:t},p),{},{components:n})):r.createElement(m,l({ref:t},p))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=d;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>k,frontMatter:()=>s,metadata:()=>i,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. 
K3s",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"k3s \uc5d0\uc11c\ub294 \uae30\ubcf8\uac12\uc73c\ub85c containerd\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc774\uc6a9\ud574 \uc124\uce58\ud569\ub2c8\ub2e4.\n\ud558\uc9c0\ub9cc \uc800\ud76c\ub294 GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c docker\ub97c \ubc31\uc5d4\ub4dc\ub85c \uc0ac\uc6a9\ud574\uc57c \ud558\ubbc0\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"--docker")," \uc635\uc158\uc744 \ud1b5\ud574 \ubc31\uc5d4\ub4dc\ub97c docker\ub85c \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,a.kt)("p",null,"k3s\ub97c \uc124\uce58 \ud6c4 k3s config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud56d\ubaa9\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\ubcf4\uc548 \ubb38\uc81c\uc640 \uad00\ub828\ub41c \ud0a4\ub4e4\uc740 <...>\ub85c \uac00\ub838\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,a.kt)("p",null,"k3s config\ub97c \ud074\ub7ec\uc2a4\ud130\uc758 kubeconfig\ub85c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c \ubcf5\uc0ac\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,a.kt)("p",null,"\ubcf5\uc0ac\ub41c config \ud30c\uc77c\uc5d0 user\uac00 \uc811\uadfc\ud560 \uc218 \uc788\ub294 \uad8c\ud55c\uc744 \uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,a.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. 
\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,a.kt)("p",null,"\uc774\uc81c \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc124\uc815\ud55c kubeconfig\ub97c \ub85c\uceec\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4.\n\ub85c\uceec\uc5d0\uc11c\ub294 \uacbd\ub85c\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"~/.kube/config"),"\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucc98\uc74c \ubcf5\uc0ac\ud55c config \ud30c\uc77c\uc5d0\ub294 server ip\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443")," \uc73c\ub85c \ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774 \uac12\uc744 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\uac8c \uc218\uc815\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","(\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud074\ub7ec\uc2a4\ud130\uc758 ip\uc5d0 \ub9de\ucdb0\uc11c ",(0,a.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," \uc73c\ub85c \uc218\uc815\ud588\uc2b5\ub2c8\ub2e4.)"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,a.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,a.kt)("h2",{id:"6-references"},"6. 
References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9a73b948.e2700338.js b/assets/js/9a73b948.9914337f.js similarity index 98% rename from assets/js/9a73b948.e2700338.js rename to assets/js/9a73b948.9914337f.js index bf173d77..debbaf66 100644 --- a/assets/js/9a73b948.e2700338.js +++ b/assets/js/9a73b948.9914337f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1725],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,u=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=o(n),f=i,m=c["".concat(u,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var u in t)hasOwnProperty.call(t,u)&&(p[u]=t[u]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>o});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/docs/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/docs/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/docs/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/kubeflow/advanced-component"}},u={},o=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. 
Create Run \uc120\ud0dd",id:"1-create-run-\uc120\ud0dd",level:3},{value:"2. Experiment \uc120\ud0dd",id:"2-experiment-\uc120\ud0dd",level:3},{value:"3. Pipeline Config \uc785\ub825",id:"3-pipeline-config-\uc785\ub825",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:o},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"\uc774\uc81c \uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\uc2dc\ucf1c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiment\ub780 Kubeflow \uc5d0\uc11c \uc2e4\ud589\ub418\ub294 Run\uc744 \ub17c\ub9ac\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ub2e8\uc704\uc785\ub2c8\ub2e4. "),(0,i.kt)("p",null,"Kubeflow\uc5d0\uc11c namespace\ub97c \ucc98\uc74c \ub4e4\uc5b4\uc624\uba74 \uc0dd\uc131\ub418\uc5b4 \uc788\ub294 Experiment\uac00 \uc5c6\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc804\uc5d0 \ubbf8\ub9ac Experiment\ub97c \uc0dd\uc131\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. Experiment\uc774 \uc788\ub2e4\uba74 ",(0,i.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"\uc73c\ub85c \ub118\uc5b4\uac00\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"Experiment\ub294 Create Experiment \ubc84\ud2bc\uc744 \ud1b5\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(1880).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,"Experiment\ub85c \uc0ac\uc6a9\ud560 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.\n",(0,i.kt)("img",{alt:"run-1.png",src:n(7220).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-create-run-\uc120\ud0dd"},"1. Create Run \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(1721).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-experiment-\uc120\ud0dd"},"2. Experiment \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(5638).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(7740).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-pipeline-config-\uc785\ub825"},"3. Pipeline Config \uc785\ub825"),(0,i.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc0dd\uc131\ud560 \ub54c \uc785\ub825\ud55c Config \uac12\ub4e4\uc744 \ucc44\uc6cc \ub123\uc2b5\ub2c8\ub2e4.\n\uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc740 number_1\uacfc number_2\ub97c \uc785\ub825\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(6038).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. 
Start"),(0,i.kt)("p",null,"\uc785\ub825 \ud6c4 Start \ubc84\ud2bc\uc744 \ub204\ub974\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(4378).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"\uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\ub4e4\uc740 Runs \ud0ed\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nRun\uc744 \ud074\ub9ad\ud558\uba74 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uacfc \uad00\ub828\ub41c \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(9818).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4. \uc544\uc9c1 \uc2e4\ud589\ub418\uc9c0 \uc54a\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ud68c\uc0c9 \ud45c\uc2dc\ub85c \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(6024).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\uc774 \uc644\ub8cc\ub418\uba74 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc\uac00 \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(1816).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"\uac00\uc7a5 \ub9c8\uc9c0\ub9c9 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ubcf4\uba74 \uc785\ub825\ud55c Config\uc778 3\uacfc 5\uc758 \ud569\uc778 8\uc774 \ucd9c\ub825\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(8673).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},1880:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},7220:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},7740:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},1721:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},6038:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},4378:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},9818:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},6024:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},1816:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},8673:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},5638:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1725],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,u=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=o(n),f=i,m=c["".concat(u,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var u in t)hasOwnProperty.call(t,u)&&(p[u]=t[u]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>o});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/docs/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/docs/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/docs/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/kubeflow/advanced-component"}},u={},o=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Create Run \uc120\ud0dd",id:"1-create-run-\uc120\ud0dd",level:3},{value:"2. Experiment \uc120\ud0dd",id:"2-experiment-\uc120\ud0dd",level:3},{value:"3. Pipeline Config \uc785\ub825",id:"3-pipeline-config-\uc785\ub825",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:o},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"\uc774\uc81c \uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\uc2dc\ucf1c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiment\ub780 Kubeflow \uc5d0\uc11c \uc2e4\ud589\ub418\ub294 Run\uc744 \ub17c\ub9ac\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ub2e8\uc704\uc785\ub2c8\ub2e4. "),(0,i.kt)("p",null,"Kubeflow\uc5d0\uc11c namespace\ub97c \ucc98\uc74c \ub4e4\uc5b4\uc624\uba74 \uc0dd\uc131\ub418\uc5b4 \uc788\ub294 Experiment\uac00 \uc5c6\uc2b5\ub2c8\ub2e4. 
\ub530\ub77c\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc804\uc5d0 \ubbf8\ub9ac Experiment\ub97c \uc0dd\uc131\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. Experiment\uc774 \uc788\ub2e4\uba74 ",(0,i.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"\uc73c\ub85c \ub118\uc5b4\uac00\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"Experiment\ub294 Create Experiment \ubc84\ud2bc\uc744 \ud1b5\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(1880).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,"Experiment\ub85c \uc0ac\uc6a9\ud560 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.\n",(0,i.kt)("img",{alt:"run-1.png",src:n(7220).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-create-run-\uc120\ud0dd"},"1. Create Run \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(1721).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-experiment-\uc120\ud0dd"},"2. Experiment \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(5638).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(7740).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-pipeline-config-\uc785\ub825"},"3. Pipeline Config \uc785\ub825"),(0,i.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc0dd\uc131\ud560 \ub54c \uc785\ub825\ud55c Config \uac12\ub4e4\uc744 \ucc44\uc6cc \ub123\uc2b5\ub2c8\ub2e4.\n\uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc740 number_1\uacfc number_2\ub97c \uc785\ub825\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(6038).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"\uc785\ub825 \ud6c4 Start \ubc84\ud2bc\uc744 \ub204\ub974\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(4378).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"\uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\ub4e4\uc740 Runs \ud0ed\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nRun\uc744 \ud074\ub9ad\ud558\uba74 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uacfc \uad00\ub828\ub41c \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(9818).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4. 
\uc544\uc9c1 \uc2e4\ud589\ub418\uc9c0 \uc54a\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ud68c\uc0c9 \ud45c\uc2dc\ub85c \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(6024).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\uc774 \uc644\ub8cc\ub418\uba74 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc\uac00 \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(1816).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"\uac00\uc7a5 \ub9c8\uc9c0\ub9c9 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ubcf4\uba74 \uc785\ub825\ud55c Config\uc778 3\uacfc 5\uc758 \ud569\uc778 8\uc774 \ucd9c\ub825\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(8673).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},1880:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},7220:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},7740:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},1721:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},6038:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},4378:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},9818:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},6024:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},1816:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},8673:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},5638:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file diff --git a/assets/js/9b54c487.b0cb9abb.js b/assets/js/9b54c487.58adc8fb.js similarity index 97% rename from assets/js/9b54c487.b0cb9abb.js rename to assets/js/9b54c487.58adc8fb.js index 42ce6ac7..01a02027 100644 --- a/assets/js/9b54c487.b0cb9abb.js +++ b/assets/js/9b54c487.58adc8fb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1008],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var 
n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"version-1.0/kubeflow/basic-requirements",title:"3. Install Requirements",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/docs/1.0/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-requirements.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/docs/1.0/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc704\ud574 \uad8c\uc7a5\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc740 python>=3.7\uc785\ub2c8\ub2e4. \ud30c\uc774\uc36c \ud658\uacbd\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\uc740 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. 
\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ucc38\uace0\ud558\uc5ec ",(0,o.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0 \uc124\uce58\ud574\uc8fc\uc2e0 \ub4a4 \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\uae30\uc5d0\uc11c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uacfc \ubc84\uc804\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"\uc55e\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"\ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1008],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"version-1.0/kubeflow/basic-requirements",title:"3. 
Install Requirements",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/docs/1.0/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-requirements.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/docs/1.0/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc704\ud574 \uad8c\uc7a5\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc740 python>=3.7\uc785\ub2c8\ub2e4. \ud30c\uc774\uc36c \ud658\uacbd\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\uc740 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ucc38\uace0\ud558\uc5ec ",(0,o.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0 \uc124\uce58\ud574\uc8fc\uc2e0 \ub4a4 \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\uae30\uc5d0\uc11c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uacfc \ubc84\uc804\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/1.0/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"\uc55e\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"\ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9c5e90dd.bf776eaf.js b/assets/js/9c5e90dd.c76c71f9.js similarity index 98% rename from assets/js/9c5e90dd.bf776eaf.js rename to assets/js/9c5e90dd.c76c71f9.js index 188d8635..87dbf1d5 100644 --- a/assets/js/9c5e90dd.bf776eaf.js +++ b/assets/js/9c5e90dd.c76c71f9.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5867],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},p=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),u=s(n),p=o,b=u["".concat(c,".").concat(p)]||u[p]||d[p]||a;return n?r.createElement(b,i(i({ref:t},m),{},{components:n})):r.createElement(b,i({ref:t},m))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=p;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>b,contentTitle:()=>d,default:()=>y,frontMatter:()=>u,metadata:()=>p,toc:()=>h});var r=n(7462),o=n(7294),a=n(3905),i=n(5999);function l(e){let{className:t,name:n,children:r,githubUrl:a,linkedinUrl:i,role:l}=e;return o.createElement("div",{className:t},o.createElement("div",{className:"card card--full-height"},o.createElement("div",{className:"card__header"},o.createElement("div",{className:"avatar avatar--vertical"},o.createElement("img",{className:"avatar__photo avatar__photo--xl",src:`${a}.png`,alt:`${n}'s avatar`}),o.createElement("div",{className:"avatar__intro"},o.createElement("h3",{className:"avatar__name"},n)),o.createElement("div",{className:"avatar__role"},o.createElement("h5",{className:"avatar__role"},l)))),o.createElement("div",{className:"card__body"},r),o.createElement("div",{className:"card__footer"},o.createElement("div",{className:"button-group button-group--block"},a&&o.createElement("a",{className:"button button--secondary",href:a},"GitHub"),i&&o.createElement("a",{className:"button button--secondary",href:i},"LinkedIn")))))}function c(e){return o.createElement(l,(0,r.Z)({},e,{className:"col col--6 margin-bottom--lg"}))}function s(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongseob Jeon",githubUrl:"https://github.com/aiden-jeon",linkedinUrl:"https://www.linkedin.com/in/jongseob-jeon/",role:"Project Leader"},o.createElement(i.Z,{id:"team.profile.Jongseob Jeon.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. 
\ubaa8\ub450\uc758 \ub525\ub7ec\ub2dd\uc744 \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 \ub525\ub7ec\ub2dd\uc744 \uc27d\uac8c \uc811\ud588\ub4ef\uc774 \ubaa8\ub450\uc758 MLOps\ub97c \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 MLOps\uc5d0 \uc27d\uac8c \uc811\ud560\uc218 \uc788\uae38 \ubc14\ub78d\ub2c8\ub2e4.")),o.createElement(c,{name:"Jayeon Kim",githubUrl:"https://github.com/anencore94",linkedinUrl:"https://www.linkedin.com/in/anencore94",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Jaeyeon Kim.body"},"\ube44\ud6a8\uc728\uc801\uc778 \uc791\uc5c5\uc744 \uc790\ub3d9\ud654\ud558\ub294 \uac83\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngchel Jang",githubUrl:"https://github.com/zamonia500",linkedinUrl:"https://www.linkedin.com/in/youngcheol-jang-b04a45187",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Youngchel Jang.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub2e8\uc21c\ud558\uac8c \uc0dd\uac01\ud558\ub294 \ub178\ub825\uc744 \ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")))}function m(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongsun Shinn",githubUrl:"https://github.com/jsshinn",linkedinUrl:"https://www.linkedin.com/in/jongsun-shinn-311b00140/"},o.createElement(i.Z,{id:"team.profile.Jongsun Shinn.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c ML Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Sangwoo Shim",githubUrl:"https://github.com/borishim",linkedinUrl:"https://www.linkedin.com/in/sangwooshim/"},o.createElement(i.Z,{id:"team.profile.Sangwoo Shim.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c CTO\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub9c8\ud0a4\ub098\ub77d\uc2a4\ub294 \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc0b0\uc5c5\uc6a9 AI \uc194\ub8e8\uc158\uc744 \uac1c\ubc1c\ud558\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc785\ub2c8\ub2e4. \uc0b0\uc5c5 \ud604\uc7a5\uc758 \ubb38\uc81c \ud574\uacb0\uc744 \ud1b5\ud574 \uc0ac\ub78c\uc774 \ubcf8\uc5f0\uc758 \uc77c\uc5d0 \uc9d1\uc911\ud560 \uc218 \uc788\uac8c \ub9cc\ub4dc\ub294 \uac83, \uadf8\uac83\uc774 \uc6b0\ub9ac\uac00 \ud558\ub294 \uc77c\uc785\ub2c8\ub2e4.")),o.createElement(c,{name:"Seunghyun Ko",githubUrl:"https://github.com/kosehy",linkedinUrl:"https://www.linkedin.com/in/seunghyunko/"},o.createElement(i.Z,{id:"team.profile.Seunghyun Ko.body"},"3i\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. kubeflow\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"SeungTae Kim",githubUrl:"https://github.com/RyanKor",linkedinUrl:"https://www.linkedin.com/in/seung-tae-kim-3bb15715b/"},o.createElement(i.Z,{id:"team.profile.SeungTae Kim.body"},"Genesis Lab\uc774\ub77c\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc5d0\uc11c Applied AI Engineer \uc778\ud134 \uc5c5\ubb34\ub97c \uc218\ud589\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc0dd\ud0dc\uacc4\uac00 \uc6b0\ub9ac \uc0b0\uc5c5 \uc804\ubc18\uc5d0 \ud070 \ubcc0\ud654\uc744 \uac00\uc838\uc62c \uac83\uc774\ub77c \ubbff\uc73c\uba70, \ud55c \uac78\uc74c\uc529 \ub098\uc544\uac00\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngdon Tae",githubUrl:"https://github.com/taepd",linkedinUrl:"https://www.linkedin.com/in/taepd/"},o.createElement(i.Z,{id:"team.profile.Youngdon Tae.body"},"\ubc31\ud328\ucee4\uc5d0\uc11c ML \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. 
\uc790\uc5f0\uc5b4\ucc98\ub9ac, \ucd94\ucc9c\uc2dc\uc2a4\ud15c, MLOps\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")))}const u={sidebar_position:3},d="Contributors",p={unversionedId:"contributors",id:"contributors",title:"Contributors",description:"Main Authors",source:"@site/community/contributors.md",sourceDirName:".",slug:"/contributors",permalink:"/community/contributors",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/contributors.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{sidebar_position:3},sidebar:"tutorialSidebar",previous:{title:"How to Contribute",permalink:"/community/how-to-contribute"}},b={},h=[{value:"Main Authors",id:"main-authors",level:2},{value:"Contributors",id:"contributors-1",level:2}],g={toc:h},f="wrapper";function y(e){let{components:t,...n}=e;return(0,a.kt)(f,(0,r.Z)({},g,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"contributors"},"Contributors"),(0,a.kt)("h2",{id:"main-authors"},"Main Authors"),(0,a.kt)(s,{mdxType:"MainAuthorRow"}),(0,a.kt)("h2",{id:"contributors-1"},"Contributors"),(0,a.kt)("p",null,"Thank you for contributing our tutorials!"),(0,a.kt)(m,{mdxType:"ContributorsRow"}))}y.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5867],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},p=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),u=s(n),p=o,b=u["".concat(c,".").concat(p)]||u[p]||d[p]||a;return n?r.createElement(b,i(i({ref:t},m),{},{components:n})):r.createElement(b,i({ref:t},m))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=p;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>b,contentTitle:()=>d,default:()=>y,frontMatter:()=>u,metadata:()=>p,toc:()=>h});var r=n(7462),o=n(7294),a=n(3905),i=n(5999);function l(e){let{className:t,name:n,children:r,githubUrl:a,linkedinUrl:i,role:l}=e;return o.createElement("div",{className:t},o.createElement("div",{className:"card card--full-height"},o.createElement("div",{className:"card__header"},o.createElement("div",{className:"avatar avatar--vertical"},o.createElement("img",{className:"avatar__photo avatar__photo--xl",src:`${a}.png`,alt:`${n}'s 
avatar`}),o.createElement("div",{className:"avatar__intro"},o.createElement("h3",{className:"avatar__name"},n)),o.createElement("div",{className:"avatar__role"},o.createElement("h5",{className:"avatar__role"},l)))),o.createElement("div",{className:"card__body"},r),o.createElement("div",{className:"card__footer"},o.createElement("div",{className:"button-group button-group--block"},a&&o.createElement("a",{className:"button button--secondary",href:a},"GitHub"),i&&o.createElement("a",{className:"button button--secondary",href:i},"LinkedIn")))))}function c(e){return o.createElement(l,(0,r.Z)({},e,{className:"col col--6 margin-bottom--lg"}))}function s(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongseob Jeon",githubUrl:"https://github.com/aiden-jeon",linkedinUrl:"https://www.linkedin.com/in/jongseob-jeon/",role:"Project Leader"},o.createElement(i.Z,{id:"team.profile.Jongseob Jeon.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ubaa8\ub450\uc758 \ub525\ub7ec\ub2dd\uc744 \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 \ub525\ub7ec\ub2dd\uc744 \uc27d\uac8c \uc811\ud588\ub4ef\uc774 \ubaa8\ub450\uc758 MLOps\ub97c \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 MLOps\uc5d0 \uc27d\uac8c \uc811\ud560\uc218 \uc788\uae38 \ubc14\ub78d\ub2c8\ub2e4.")),o.createElement(c,{name:"Jayeon Kim",githubUrl:"https://github.com/anencore94",linkedinUrl:"https://www.linkedin.com/in/anencore94",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Jaeyeon Kim.body"},"\ube44\ud6a8\uc728\uc801\uc778 \uc791\uc5c5\uc744 \uc790\ub3d9\ud654\ud558\ub294 \uac83\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngchel Jang",githubUrl:"https://github.com/zamonia500",linkedinUrl:"https://www.linkedin.com/in/youngcheol-jang-b04a45187",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Youngchel Jang.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub2e8\uc21c\ud558\uac8c \uc0dd\uac01\ud558\ub294 \ub178\ub825\uc744 \ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")))}function m(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongsun Shinn",githubUrl:"https://github.com/jsshinn",linkedinUrl:"https://www.linkedin.com/in/jongsun-shinn-311b00140/"},o.createElement(i.Z,{id:"team.profile.Jongsun Shinn.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c ML Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Sangwoo Shim",githubUrl:"https://github.com/borishim",linkedinUrl:"https://www.linkedin.com/in/sangwooshim/"},o.createElement(i.Z,{id:"team.profile.Sangwoo Shim.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c CTO\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub9c8\ud0a4\ub098\ub77d\uc2a4\ub294 \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc0b0\uc5c5\uc6a9 AI \uc194\ub8e8\uc158\uc744 \uac1c\ubc1c\ud558\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc785\ub2c8\ub2e4. 
\uc0b0\uc5c5 \ud604\uc7a5\uc758 \ubb38\uc81c \ud574\uacb0\uc744 \ud1b5\ud574 \uc0ac\ub78c\uc774 \ubcf8\uc5f0\uc758 \uc77c\uc5d0 \uc9d1\uc911\ud560 \uc218 \uc788\uac8c \ub9cc\ub4dc\ub294 \uac83, \uadf8\uac83\uc774 \uc6b0\ub9ac\uac00 \ud558\ub294 \uc77c\uc785\ub2c8\ub2e4.")),o.createElement(c,{name:"Seunghyun Ko",githubUrl:"https://github.com/kosehy",linkedinUrl:"https://www.linkedin.com/in/seunghyunko/"},o.createElement(i.Z,{id:"team.profile.Seunghyun Ko.body"},"3i\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. kubeflow\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"SeungTae Kim",githubUrl:"https://github.com/RyanKor",linkedinUrl:"https://www.linkedin.com/in/seung-tae-kim-3bb15715b/"},o.createElement(i.Z,{id:"team.profile.SeungTae Kim.body"},"Genesis Lab\uc774\ub77c\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc5d0\uc11c Applied AI Engineer \uc778\ud134 \uc5c5\ubb34\ub97c \uc218\ud589\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc0dd\ud0dc\uacc4\uac00 \uc6b0\ub9ac \uc0b0\uc5c5 \uc804\ubc18\uc5d0 \ud070 \ubcc0\ud654\uc744 \uac00\uc838\uc62c \uac83\uc774\ub77c \ubbff\uc73c\uba70, \ud55c \uac78\uc74c\uc529 \ub098\uc544\uac00\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngdon Tae",githubUrl:"https://github.com/taepd",linkedinUrl:"https://www.linkedin.com/in/taepd/"},o.createElement(i.Z,{id:"team.profile.Youngdon Tae.body"},"\ubc31\ud328\ucee4\uc5d0\uc11c ML \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uc790\uc5f0\uc5b4\ucc98\ub9ac, \ucd94\ucc9c\uc2dc\uc2a4\ud15c, MLOps\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")))}const u={sidebar_position:3},d="Contributors",p={unversionedId:"contributors",id:"contributors",title:"Contributors",description:"Main Authors",source:"@site/community/contributors.md",sourceDirName:".",slug:"/contributors",permalink:"/community/contributors",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/contributors.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{sidebar_position:3},sidebar:"tutorialSidebar",previous:{title:"How to Contribute",permalink:"/community/how-to-contribute"}},b={},h=[{value:"Main Authors",id:"main-authors",level:2},{value:"Contributors",id:"contributors-1",level:2}],g={toc:h},f="wrapper";function y(e){let{components:t,...n}=e;return(0,a.kt)(f,(0,r.Z)({},g,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"contributors"},"Contributors"),(0,a.kt)("h2",{id:"main-authors"},"Main Authors"),(0,a.kt)(s,{mdxType:"MainAuthorRow"}),(0,a.kt)("h2",{id:"contributors-1"},"Contributors"),(0,a.kt)("p",null,"Thank you for contributing our tutorials!"),(0,a.kt)(m,{mdxType:"ContributorsRow"}))}y.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/9f898b75.fbac3ee7.js b/assets/js/9f898b75.15ac09e3.js similarity index 98% rename from assets/js/9f898b75.fbac3ee7.js rename to assets/js/9f898b75.15ac09e3.js index ea8fd590..aabc93bb 100644 --- a/assets/js/9f898b75.fbac3ee7.js +++ b/assets/js/9f898b75.15ac09e3.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2157],{3905:(t,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>g});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var 
n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function p(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):p(p({},e),t)),a},d=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},s="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},k=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,d=o(t,["components","mdxType","originalType","parentName"]),s=m(a),k=r,g=s["".concat(i,".").concat(k)]||s[k]||u[k]||l;return a?n.createElement(g,p(p({ref:e},d),{},{components:a})):n.createElement(g,p({ref:e},d))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,p=new Array(l);p[0]=k;var o={};for(var i in e)hasOwnProperty.call(e,i)&&(o[i]=e[i]);o.originalType=t,o[s]="string"==typeof t?t:r,p[1]=o;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>p,default:()=>u,frontMatter:()=>l,metadata:()=>o,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},p=void 0,o={unversionedId:"further-readings/info",id:"version-1.0/further-readings/info",title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",description:"MLOps Component",source:"@site/versioned_docs/version-1.0/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/docs/1.0/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/further-readings/info.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",frontMatter:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/1.0/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],d={toc:m},s="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(s,(0,n.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps Concepts"),"\uc5d0\uc11c \ub2e4\ub8e8\uc5c8\ub358 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub3c4\uc2dd\ud654\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(6426).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"\uc774 \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8ec \uae30\uc220 \uc2a4\ud0dd\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(608).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"\ubcf4\uc2dc\ub294 \uac83\ucc98\ub7fc \uc544\uc9c1 \uc6b0\ub9ac\uac00 \ub2e4\ub8e8\uc9c0 \ubabb\ud55c \ub9ce\uc740 MLOps \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc2dc\uac04 \uad00\uacc4\uc0c1 \uc774\ubc88\uc5d0 \ubaa8\ub450 \ub2e4\ub8e8\uc9c0\ub294 \ubabb\ud588\uc9c0\ub9cc, \ub9cc\uc57d \ud544\uc694\ud558\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\ubcf4\uba74 \uc88b\uc744 \uac83 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(7777).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"\uc138\ubd80 \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. & Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process 
Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},6426:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},608:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},7777:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2157],{3905:(t,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>g});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function p(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):p(p({},e),t)),a},d=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},s="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},k=n.forwardRef((function(t,e){var 
a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,d=o(t,["components","mdxType","originalType","parentName"]),s=m(a),k=r,g=s["".concat(i,".").concat(k)]||s[k]||u[k]||l;return a?n.createElement(g,p(p({ref:e},d),{},{components:a})):n.createElement(g,p({ref:e},d))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,p=new Array(l);p[0]=k;var o={};for(var i in e)hasOwnProperty.call(e,i)&&(o[i]=e[i]);o.originalType=t,o[s]="string"==typeof t?t:r,p[1]=o;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>p,default:()=>u,frontMatter:()=>l,metadata:()=>o,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},p=void 0,o={unversionedId:"further-readings/info",id:"version-1.0/further-readings/info",title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",description:"MLOps Component",source:"@site/versioned_docs/version-1.0/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/docs/1.0/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/further-readings/info.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",frontMatter:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/1.0/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],d={toc:m},s="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(s,(0,n.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/introduction/component"},"MLOps Concepts"),"\uc5d0\uc11c \ub2e4\ub8e8\uc5c8\ub358 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub3c4\uc2dd\ud654\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(6426).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"\uc774 \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8ec \uae30\uc220 \uc2a4\ud0dd\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(608).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"\ubcf4\uc2dc\ub294 \uac83\ucc98\ub7fc \uc544\uc9c1 \uc6b0\ub9ac\uac00 \ub2e4\ub8e8\uc9c0 \ubabb\ud55c \ub9ce\uc740 MLOps \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc2dc\uac04 \uad00\uacc4\uc0c1 \uc774\ubc88\uc5d0 \ubaa8\ub450 \ub2e4\ub8e8\uc9c0\ub294 \ubabb\ud588\uc9c0\ub9cc, \ub9cc\uc57d \ud544\uc694\ud558\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\ubcf4\uba74 \uc88b\uc744 \uac83 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(7777).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"\uc138\ubd80 \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. & Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process 
Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},6426:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},608:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},7777:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file diff --git a/assets/js/a606c19a.dc487a08.js b/assets/js/a606c19a.77fee20b.js similarity index 98% rename from assets/js/a606c19a.dc487a08.js rename to assets/js/a606c19a.77fee20b.js index 0fa6b55e..5a90f3a6 100644 --- a/assets/js/a606c19a.dc487a08.js +++ b/assets/js/a606c19a.77fee20b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4555],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var i=n.createContext({}),u=function(e){var t=n.useContext(i),r=t;return e&&(r="function"==typeof 
e?e(t):s(s({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,f=c["".concat(i,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,s(s({ref:t},p),{},{components:r})):n.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,s=new Array(a);s[0]=m;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"kubeflow-dashboard-guide/volumes",id:"kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/docs/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/volumes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/docs/kubeflow-dashboard-guide/experiments"}},i={},u=[{value:"Volumes",id:"volumes",level:2},{value:"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30",id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30",level:2}],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"volumes"},"Volumes"),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Volumes\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"volumes",src:r(4156).Z,width:"1386",height:"382"})),(0,o.kt)("p",null,"Volumes \ud0ed\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/volumes/"},"Kubernetes\uc758 \ubcfc\ub968(Volume)"),", \uc815\ud655\ud788\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/persistent-volumes/"},"\ud37c\uc2dc\uc2a4\ud134\ud2b8 \ubcfc\ub968 \ud074\ub808\uc784(Persistent Volume Claim, \uc774\ud558 pvc)")," \uc911 \ud604\uc7ac user\uc758 namespace\uc5d0 \uc18d\ud55c pvc\ub97c \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc704 \uc2a4\ud06c\ub9b0\uc0f7\uc744 \ubcf4\uba74, ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. 
Notebooks")," \ud398\uc774\uc9c0\uc5d0\uc11c \uc0dd\uc131\ud55c Volume\uc758 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 Volume\uc758 Storage Class\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c Default Storage Class\uc778 local-path\ub85c \uc124\uc815\ub418\uc5b4\uc788\uc74c\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc678\uc5d0\ub3c4 user namespace\uc5d0 \uc0c8\ub85c\uc6b4 \ubcfc\ub968\uc744 \uc0dd\uc131\ud558\uac70\ub098, \uc870\ud68c\ud558\uac70\ub098, \uc0ad\uc81c\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0 Volumes \ud398\uc774\uc9c0\ub97c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("hr",null),(0,o.kt)("h2",{id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30"},"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30"),(0,o.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,o.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"new-volume",src:r(1037).Z,width:"1192",height:"934"})),(0,o.kt)("p",null,"name, size, storage class, access mode\ub97c \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6d0\ud558\ub294 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc744 \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcfc\ub968\uc758 Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending"),"\uc73c\ub85c \uc870\ud68c\ub429\ub2c8\ub2e4. ",(0,o.kt)("inlineCode",{parentName:"p"},"Status")," \uc544\uc774\ucf58\uc5d0 \ub9c8\uc6b0\uc2a4 \ucee4\uc11c\ub97c \uac00\uc838\ub2e4 \ub300\uba74 ",(0,o.kt)("em",{parentName:"p"},"\ud574\ub2f9 \ubcfc\ub968\uc740 mount\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 first consumer\uac00 \ub098\ud0c0\ub0a0 \ub54c \uc2e4\uc81c\ub85c \uc0dd\uc131\uc744 \uc9c4\ud589\ud55c\ub2e4(This volume will be bound when its first consumer is created.)"),"\ub294 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass"),"\uc778 ",(0,o.kt)("inlineCode",{parentName:"p"},"local-path"),"\uc758 \ubcfc\ub968 \uc0dd\uc131 \uc815\ucc45\uc5d0 \ud574\ub2f9\ud558\uba70, ",(0,o.kt)("strong",{parentName:"p"},"\ubb38\uc81c \uc0c1\ud669\uc774 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \ud398\uc774\uc9c0\uc5d0\uc11c Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending")," \uc73c\ub85c \ubcf4\uc774\ub354\ub77c\ub3c4 \ud574\ub2f9 \ubcfc\ub968\uc744 \uc0ac\uc6a9\ud558\uae38 \uc6d0\ud558\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ud639\uc740 \ud30c\ub4dc(Pod)\uc5d0\uc11c\ub294 \ud574\ub2f9 \ubcfc\ub968\uc758 \uc774\ub984\uc744 \uc9c0\uc815\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc73c\uba70, \uadf8\ub54c \uc2e4\uc81c\ub85c \ubcfc\ub968 \uc0dd\uc131\uc774 \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"creating-volume",src:r(4502).Z,width:"1572",height:"450"})))}d.isMDXComponent=!0},4502:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const 
n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1037:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},4156:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4555],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var i=n.createContext({}),u=function(e){var t=n.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,f=c["".concat(i,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,s(s({ref:t},p),{},{components:r})):n.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,s=new Array(a);s[0]=m;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"kubeflow-dashboard-guide/volumes",id:"kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/docs/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/volumes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. 
Experiments(AutoML)",permalink:"/docs/kubeflow-dashboard-guide/experiments"}},i={},u=[{value:"Volumes",id:"volumes",level:2},{value:"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30",id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30",level:2}],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"volumes"},"Volumes"),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Volumes\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"volumes",src:r(4156).Z,width:"1386",height:"382"})),(0,o.kt)("p",null,"Volumes \ud0ed\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/volumes/"},"Kubernetes\uc758 \ubcfc\ub968(Volume)"),", \uc815\ud655\ud788\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/persistent-volumes/"},"\ud37c\uc2dc\uc2a4\ud134\ud2b8 \ubcfc\ub968 \ud074\ub808\uc784(Persistent Volume Claim, \uc774\ud558 pvc)")," \uc911 \ud604\uc7ac user\uc758 namespace\uc5d0 \uc18d\ud55c pvc\ub97c \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc704 \uc2a4\ud06c\ub9b0\uc0f7\uc744 \ubcf4\uba74, ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," \ud398\uc774\uc9c0\uc5d0\uc11c \uc0dd\uc131\ud55c Volume\uc758 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 Volume\uc758 Storage Class\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c Default Storage Class\uc778 local-path\ub85c \uc124\uc815\ub418\uc5b4\uc788\uc74c\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc678\uc5d0\ub3c4 user namespace\uc5d0 \uc0c8\ub85c\uc6b4 \ubcfc\ub968\uc744 \uc0dd\uc131\ud558\uac70\ub098, \uc870\ud68c\ud558\uac70\ub098, \uc0ad\uc81c\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0 Volumes \ud398\uc774\uc9c0\ub97c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("hr",null),(0,o.kt)("h2",{id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30"},"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30"),(0,o.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,o.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"new-volume",src:r(1037).Z,width:"1192",height:"934"})),(0,o.kt)("p",null,"name, size, storage class, access mode\ub97c \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6d0\ud558\ub294 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc744 \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcfc\ub968\uc758 Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending"),"\uc73c\ub85c \uc870\ud68c\ub429\ub2c8\ub2e4. 
",(0,o.kt)("inlineCode",{parentName:"p"},"Status")," \uc544\uc774\ucf58\uc5d0 \ub9c8\uc6b0\uc2a4 \ucee4\uc11c\ub97c \uac00\uc838\ub2e4 \ub300\uba74 ",(0,o.kt)("em",{parentName:"p"},"\ud574\ub2f9 \ubcfc\ub968\uc740 mount\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 first consumer\uac00 \ub098\ud0c0\ub0a0 \ub54c \uc2e4\uc81c\ub85c \uc0dd\uc131\uc744 \uc9c4\ud589\ud55c\ub2e4(This volume will be bound when its first consumer is created.)"),"\ub294 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass"),"\uc778 ",(0,o.kt)("inlineCode",{parentName:"p"},"local-path"),"\uc758 \ubcfc\ub968 \uc0dd\uc131 \uc815\ucc45\uc5d0 \ud574\ub2f9\ud558\uba70, ",(0,o.kt)("strong",{parentName:"p"},"\ubb38\uc81c \uc0c1\ud669\uc774 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \ud398\uc774\uc9c0\uc5d0\uc11c Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending")," \uc73c\ub85c \ubcf4\uc774\ub354\ub77c\ub3c4 \ud574\ub2f9 \ubcfc\ub968\uc744 \uc0ac\uc6a9\ud558\uae38 \uc6d0\ud558\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ud639\uc740 \ud30c\ub4dc(Pod)\uc5d0\uc11c\ub294 \ud574\ub2f9 \ubcfc\ub968\uc758 \uc774\ub984\uc744 \uc9c0\uc815\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc73c\uba70, \uadf8\ub54c \uc2e4\uc81c\ub85c \ubcfc\ub968 \uc0dd\uc131\uc774 \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"creating-volume",src:r(4502).Z,width:"1572",height:"450"})))}d.isMDXComponent=!0},4502:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1037:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},4156:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file diff --git a/assets/js/a7958b24.56332bed.js b/assets/js/a7958b24.7400f567.js similarity index 99% rename from assets/js/a7958b24.56332bed.js rename to assets/js/a7958b24.7400f567.js index d0aed55e..017e8f0e 100644 --- a/assets/js/a7958b24.56332bed.js +++ b/assets/js/a7958b24.7400f567.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3255],{3905:(n,e,t)=>{t.d(e,{Zo:()=>m,kt:()=>c});var a=t(7294);function r(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function p(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(r[t]=n[t]);return r}(n,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(r[t]=n[t])}return r}var o=a.createContext({}),s=function(n){var e=a.useContext(o),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},m=function(n){var e=s(n.components);return a.createElement(o.Provider,{value:e},n.children)},d="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var 
t=n.components,r=n.mdxType,p=n.originalType,o=n.parentName,m=l(n,["components","mdxType","originalType","parentName"]),d=s(t),u=r,c=d["".concat(o,".").concat(u)]||d[u]||_[u]||p;return t?a.createElement(c,i(i({ref:e},m),{},{components:t})):a.createElement(c,i({ref:e},m))}));function c(n,e){var t=arguments,r=e&&e.mdxType;if("string"==typeof n||r){var p=t.length,i=new Array(p);i[0]=u;var l={};for(var o in e)hasOwnProperty.call(e,o)&&(l[o]=e[o]);l.originalType=n,l[d]="string"==typeof n?n:r,i[1]=l;for(var s=2;s{t.r(e),t.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>p,metadata:()=>l,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const p={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/advanced-environment",id:"kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/docs/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/docs/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-environment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/docs/kubeflow/advanced-pipeline"}},o={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95",id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],m={toc:s},d="wrapper";function _(n){let{components:e,...t}=n;return(0,r.kt)(d,(0,a.Z)({},m,t,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),"\uc5d0\uc11c \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uba74 \uc2e4\ud328\ud558\uac8c \ub429\ub2c8\ub2e4. 
\uc65c \uc2e4\ud328\ud558\ub294\uc9c0 \uc54c\uc544\ubcf4\uace0 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uc218 \uc788\ub3c4\ub85d \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component#convert-to-kubeflow-format"},"\uc55e\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8"),"\ub97c yaml\ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc744 \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc5d0 \ub530\ub974\uba74 \uc774 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 
\uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \uc704\uc5d0\uc11c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc624\ub958\uac00 \ubc1c\uc0dd\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ub418\ub294 \ubc29\uc2dd\uc5d0 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \uac01\uac01 \ub3c5\ub9bd\ub41c \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc790\uc138\ud788 \ubcf4\uba74 \uc0dd\uc131\ub41c \ub9cc\ub4e0 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \uc5d0\uc11c \uc815\ud574\uc9c4 \uc774\ubbf8\uc9c0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc5b4\ub5a4 \uc774\uc720 \ub54c\ubb38\uc5d0 \uc2e4\ud589\uc774 \uc548 \ub418\ub294\uc9c0 \ub208\uce58\ucc44\uc2e0 \ubd84\ub4e4\ub3c4 \uc788\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," \uc774\ubbf8\uc9c0\uc5d0\ub294 \uc6b0\ub9ac\uac00 \uc0ac\uc6a9\ud558\uace0\uc790 \ud558\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn")," \uc774 \uc124\uce58\ub418\uc5b4 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7ec\ubbc0\ub85c \uc2e4\ud589\ud560 \ub54c \ud574\ub2f9 \ud328\ud0a4\uc9c0\uac00 \uc874\uc7ac\ud558\uc9c0 \uc54a\ub294\ub2e4\ub294 \uc5d0\ub7ec\uc640 \ud568\uaed8 \uc2e4\ud589\uc774 \uc548 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub7fc \uc5b4\ub5bb\uac8c \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc744\uae4c\uc694?"),(0,r.kt)("h2",{id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95"},"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95"),(0,r.kt)("p",null,"Kubeflow\ub97c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub450 \uac00\uc9c0 \ubc29\ubc95\uc744 \ud1b5\ud574 \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image")," \uc0ac\uc6a9"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install")," \uc0ac\uc6a9")),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud560 \ub54c \uc0ac\uc6a9\ud588\ub358 \ud568\uc218 ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," \uac00 \uc5b4\ub5a4 argument\ub4e4\uc744 \ubc1b\uc744 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": 
\ucef4\ud3ec\ub10c\ud2b8\ub85c \ub9cc\ub4e4 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c \ud568\uc218"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ud560 \uc774\ubbf8\uc9c0"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc11c \ucd94\uac00\ub85c \uc124\uce58\ud574\uc57c \ud558\ub294 \ud328\ud0a4\uc9c0")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc0ac\uc6a9\ud558\ub294 base_image\uc5d0 \ud328\ud0a4\uc9c0\ub4e4\uc774 \uc804\ubd80 \uc124\uce58\ub418\uc5b4 \uc788\ub2e4\uba74 \ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0 \uc124\uce58 \uc5c6\uc774 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 Dockerfile\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"\uc704\uc758 Dockerfile\uc744 \uc774\uc6a9\ud574 \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc2e4\uc2b5\uc5d0\uc11c \uc0ac\uc6a9\ud574\ubcfc \ub3c4\ucee4 \ud5c8\ube0c\ub294 ghcr\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uac01\uc790 \ud658\uacbd\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ub3c4\ucee4 \ud5c8\ube0c\ub97c \uc120\ud0dd \ud6c4 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"\uc774\uc81c base_image\ub97c \uc785\ub825\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"base_image\uac00 \uc6b0\ub9ac\uac00 \uc124\uc815\ud55c \uac12\uc73c\ub85c \ubc14\ub010 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \ud328\ud0a4\uc9c0\uac00 \ucd94\uac00\ub420 \ub54c\ub9c8\ub2e4 docker \uc774\ubbf8\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \uc0c8\ub85c \uc0dd\uc131\ud558\ub294 \uc791\uc5c5\uc740 \ub9ce\uc740 \uc2dc\uac04\uc774 \uc18c\uc694\ub429\ub2c8\ub2e4.\n\uc774 \ub54c, ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument \ub97c \uc0ac\uc6a9\ud558\uba74 \ud328\ud0a4\uc9c0\ub97c \ucee8\ud14c\uc774\ub108\uc5d0 \uc27d\uac8c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n 
_parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"\uc704\uc5d0 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\uc0dd\uc131\ub41c yaml \ud30c\uc77c\uc744 \uc790\uc138\ud788 \ubcf4\uba74, \ub2e4\uc74c\uacfc \uac19\uc740 \uc904\uc774 \uc790\ub3d9\uc73c\ub85c \ucd94\uac00\ub418\uc5b4 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uae30 \ub54c\ubb38\uc5d0 \uc624\ub958 \uc5c6\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}_.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3255],{3905:(n,e,t)=>{t.d(e,{Zo:()=>m,kt:()=>c});var a=t(7294);function r(n,e,t){return e in n?Object.defineProperty(n,e,{value:t,enumerable:!0,configurable:!0,writable:!0}):n[e]=t,n}function p(n,e){var t=Object.keys(n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(n);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),t.push.apply(t,a)}return t}function i(n){for(var e=1;e=0||(r[t]=n[t]);return r}(n,e);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(n);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(n,t)&&(r[t]=n[t])}return r}var o=a.createContext({}),s=function(n){var e=a.useContext(o),t=e;return n&&(t="function"==typeof n?n(e):i(i({},e),n)),t},m=function(n){var e=s(n.components);return a.createElement(o.Provider,{value:e},n.children)},d="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return a.createElement(a.Fragment,{},e)}},u=a.forwardRef((function(n,e){var t=n.components,r=n.mdxType,p=n.originalType,o=n.parentName,m=l(n,["components","mdxType","originalType","parentName"]),d=s(t),u=r,c=d["".concat(o,".").concat(u)]||d[u]||_[u]||p;return t?a.createElement(c,i(i({ref:e},m),{},{components:t})):a.createElement(c,i({ref:e},m))}));function c(n,e){var t=arguments,r=e&&e.mdxType;if("string"==typeof n||r){var p=t.length,i=new Array(p);i[0]=u;var l={};for(var o in e)hasOwnProperty.call(e,o)&&(l[o]=e[o]);l.originalType=n,l[d]="string"==typeof n?n:r,i[1]=l;for(var s=2;s{t.r(e),t.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>p,metadata:()=>l,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const p={title:"9. 
Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/advanced-environment",id:"kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/docs/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/docs/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-environment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/docs/kubeflow/advanced-pipeline"}},o={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95",id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],m={toc:s},d="wrapper";function _(n){let{components:e,...t}=n;return(0,r.kt)(d,(0,a.Z)({},m,t,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),"\uc5d0\uc11c \uc791\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uba74 \uc2e4\ud328\ud558\uac8c \ub429\ub2c8\ub2e4. 
\uc65c \uc2e4\ud328\ud558\ub294\uc9c0 \uc54c\uc544\ubcf4\uace0 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uc218 \uc788\ub3c4\ub85d \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component#convert-to-kubeflow-format"},"\uc55e\uc5d0\uc11c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8"),"\ub97c yaml\ud30c\uc77c\ub85c \ubcc0\ud658\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc704\uc758 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc744 \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"\uc55e\uc11c ",(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component"),"\uc5d0\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc5d0 \ub530\ub974\uba74 \uc774 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 
\uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \uc704\uc5d0\uc11c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \uc624\ub958\uac00 \ubc1c\uc0dd\ud558\uac8c \ub429\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ub418\ub294 \ubc29\uc2dd\uc5d0 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \uac01\uac01 \ub3c5\ub9bd\ub41c \ucee8\ud14c\uc774\ub108 \uc704\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc790\uc138\ud788 \ubcf4\uba74 \uc0dd\uc131\ub41c \ub9cc\ub4e0 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \uc5d0\uc11c \uc815\ud574\uc9c4 \uc774\ubbf8\uc9c0\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7")," \uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\uc81c \uc5b4\ub5a4 \uc774\uc720 \ub54c\ubb38\uc5d0 \uc2e4\ud589\uc774 \uc548 \ub418\ub294\uc9c0 \ub208\uce58\ucc44\uc2e0 \ubd84\ub4e4\ub3c4 \uc788\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," \uc774\ubbf8\uc9c0\uc5d0\ub294 \uc6b0\ub9ac\uac00 \uc0ac\uc6a9\ud558\uace0\uc790 \ud558\ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn")," \uc774 \uc124\uce58\ub418\uc5b4 \uc788\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7ec\ubbc0\ub85c \uc2e4\ud589\ud560 \ub54c \ud574\ub2f9 \ud328\ud0a4\uc9c0\uac00 \uc874\uc7ac\ud558\uc9c0 \uc54a\ub294\ub2e4\ub294 \uc5d0\ub7ec\uc640 \ud568\uaed8 \uc2e4\ud589\uc774 \uc548 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uadf8\ub7fc \uc5b4\ub5bb\uac8c \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc744\uae4c\uc694?"),(0,r.kt)("h2",{id:"\ud328\ud0a4\uc9c0-\ucd94\uac00-\ubc29\ubc95"},"\ud328\ud0a4\uc9c0 \ucd94\uac00 \ubc29\ubc95"),(0,r.kt)("p",null,"Kubeflow\ub97c \ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub450 \uac00\uc9c0 \ubc29\ubc95\uc744 \ud1b5\ud574 \ud328\ud0a4\uc9c0\ub97c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image")," \uc0ac\uc6a9"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install")," \uc0ac\uc6a9")),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud560 \ub54c \uc0ac\uc6a9\ud588\ub358 \ud568\uc218 ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," \uac00 \uc5b4\ub5a4 argument\ub4e4\uc744 \ubc1b\uc744 \uc218 \uc788\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": 
\ucef4\ud3ec\ub10c\ud2b8\ub85c \ub9cc\ub4e4 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c \ud568\uc218"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \uc2e4\ud589\ud560 \uc774\ubbf8\uc9c0"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud574\uc11c \ucd94\uac00\ub85c \uc124\uce58\ud574\uc57c \ud558\ub294 \ud328\ud0a4\uc9c0")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\ub9cc\uc57d \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc0ac\uc6a9\ud558\ub294 base_image\uc5d0 \ud328\ud0a4\uc9c0\ub4e4\uc774 \uc804\ubd80 \uc124\uce58\ub418\uc5b4 \uc788\ub2e4\uba74 \ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0 \uc124\uce58 \uc5c6\uc774 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 Dockerfile\uc744 \uc791\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"\uc704\uc758 Dockerfile\uc744 \uc774\uc6a9\ud574 \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc2e4\uc2b5\uc5d0\uc11c \uc0ac\uc6a9\ud574\ubcfc \ub3c4\ucee4 \ud5c8\ube0c\ub294 ghcr\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uac01\uc790 \ud658\uacbd\uc5d0 \ub9de\ucd94\uc5b4\uc11c \ub3c4\ucee4 \ud5c8\ube0c\ub97c \uc120\ud0dd \ud6c4 \uc5c5\ub85c\ub4dc\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"\uc774\uc81c base_image\ub97c \uc785\ub825\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc774\uc81c \uc0dd\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ucef4\ud30c\uc77c\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"base_image\uac00 \uc6b0\ub9ac\uac00 \uc124\uc815\ud55c \uac12\uc73c\ub85c \ubc14\ub010 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"\ud558\uc9c0\ub9cc \ud328\ud0a4\uc9c0\uac00 \ucd94\uac00\ub420 \ub54c\ub9c8\ub2e4 docker \uc774\ubbf8\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \uc0c8\ub85c \uc0dd\uc131\ud558\ub294 \uc791\uc5c5\uc740 \ub9ce\uc740 \uc2dc\uac04\uc774 \uc18c\uc694\ub429\ub2c8\ub2e4.\n\uc774 \ub54c, ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument \ub97c \uc0ac\uc6a9\ud558\uba74 \ud328\ud0a4\uc9c0\ub97c \ucee8\ud14c\uc774\ub108\uc5d0 \uc27d\uac8c \ucd94\uac00\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"\uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," \ud30c\uc77c\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n 
_parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"\uc704\uc5d0 \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\ub418\ub294 \uc21c\uc11c\ub97c \uc880 \ub354 \uc790\uc138\ud788 \ub4e4\uc5ec\ub2e4\ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"\uc0dd\uc131\ub41c yaml \ud30c\uc77c\uc744 \uc790\uc138\ud788 \ubcf4\uba74, \ub2e4\uc74c\uacfc \uac19\uc740 \uc904\uc774 \uc790\ub3d9\uc73c\ub85c \ucd94\uac00\ub418\uc5b4 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uae30 \ub54c\ubb38\uc5d0 \uc624\ub958 \uc5c6\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}_.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/affd256f.1933e19e.js b/assets/js/affd256f.8d1b9b6b.js similarity index 99% rename from assets/js/affd256f.1933e19e.js rename to assets/js/affd256f.8d1b9b6b.js index 693731a4..1e7dd8d5 100644 --- a/assets/js/affd256f.1933e19e.js +++ b/assets/js/affd256f.8d1b9b6b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3457],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),k=s(a),c=r,d=k["".concat(p,".").concat(c)]||k[c]||m[c]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=c;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[k]="string"==typeof 
e?e:r,l[1]=i;for(var s=2;s{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const o={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},l=void 0,i={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"kubeflow-dashboard-guide/notebooks",title:"2. Notebooks",description:"",source:"@site/docs/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/docs/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards"}},p={},s=[{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30",level:2}],u={toc:s},k="wrapper";function m(e){let{components:t,...o}=e;return(0,r.kt)(k,(0,n.Z)({},u,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30"),(0,r.kt)("p",null,"\ub2e4\uc74c Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Notebooks\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:a(7511).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"Notebooks \ud0ed\uc740 JupyterHub\uc640 \ube44\uc2b7\ud558\uac8c \uc720\uc800\ubcc4\ub85c jupyter notebook \ubc0f code server \ud658\uacbd(\uc774\ud558 \ub178\ud2b8\ubd81 \uc11c\ubc84)\uc744 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc0dd\uc131\ud558\uace0 \uc811\uc18d\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-home",src:a(1288).Z,width:"5008",height:"2682"})),(0,r.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW NOTEBOOK")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-notebook",src:a(8666).Z,width:"1900",height:"312"})),(0,r.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \ud654\uba74\uc774 \ub098\ud0c0\ub098\uba74, \uc774\uc81c \uc0dd\uc131\ud560 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc2a4\ud399(Spec)\uc744 \uba85\uc2dc\ud558\uc5ec \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create",src:a(4164).Z,width:"1738",height:"1674"})),(0,r.kt)("details",null,(0,r.kt)("summary",null,"\uac01 \uc2a4\ud399\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc544\ub798\uc640 
\uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"name"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uad6c\ubd84\ud560 \uc218 \uc788\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"namespace")," :",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub530\ub85c \ubcc0\uacbd\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. (\ud604\uc7ac \ub85c\uadf8\uc778\ud55c user \uacc4\uc815\uc758 namespace\uc774 \uc790\ub3d9\uc73c\ub85c \uc9c0\uc815\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.)"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Image"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"sklearn, pytorch, tensorflow \ub4f1\uc758 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0\uac00 \ubbf8\ub9ac \uc124\uce58\ub41c jupyter lab \uc774\ubbf8\uc9c0 \uc911 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c GPU\ub97c \uc0ac\uc6a9\ud558\uc5ec tensorflow-cuda, pytorch-cuda \ub4f1\uc758 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, ",(0,r.kt)("strong",{parentName:"li"},"\ud558\ub2e8\uc758 GPUs")," \ubd80\ubd84\uc744 \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0\ub098 \uc18c\uc2a4\ucf54\ub4dc \ub4f1\uc744 \ud3ec\ud568\ud55c \ucee4\uc2a4\ud140(Custom) \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ucee4\uc2a4\ud140 \uc774\ubbf8\uc9c0(Custom Image)\ub97c \ub9cc\ub4e4\uace0 \ubc30\ud3ec \ud6c4 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"CPU / RAM"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ud544\uc694\ud55c \uc790\uc6d0 \uc0ac\uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"cpu : core \ub2e8\uc704",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uac00\uc0c1 core \uac1c\uc218 \ub2e8\uc704\ub97c \uc758\ubbf8\ud558\uba70, int \ud615\uc2dd\uc774 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"2.7")," \ub4f1\uc758 float \ud615\uc2dd\ub3c4 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"memory : Gi \ub2e8\uc704"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"GPUs"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc5d0 \ud560\ub2f9\ud560 GPU \uac1c\uc218\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"None"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU \uc790\uc6d0\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc740 \uc0c1\ud669"))),(0,r.kt)("li",{parentName:"ul"},"1, 2, 4",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU 1, 2, 4 \uac1c \ud560\ub2f9"))))),(0,r.kt)("li",{parentName:"ul"},"GPU Vendor",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc55e\uc758 
",(0,r.kt)("a",{parentName:"li",href:"/docs/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," \ub97c \ub530\ub77c nvidia gpu plugin\uc744 \uc124\uce58\ud558\uc600\ub2e4\uba74 NVIDIA\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Workspace Volume"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c \ud544\uc694\ud55c \ub9cc\ud07c\uc758 \ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"Type \uacfc Name \uc740 \ubcc0\uacbd\ud558\uc9c0 \uc54a\uace0, ",(0,r.kt)("strong",{parentName:"li"},"\ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \ub298\ub9ac\uace0 \uc2f6\uac70\ub098")," ",(0,r.kt)("strong",{parentName:"li"},"AccessMode \ub97c \ubcc0\uacbd\ud558\uace0 \uc2f6\uc744")," \ub54c\uc5d0\ub9cc \ubcc0\uacbd\ud574\uc11c \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," \uccb4\ud06c\ubc15\uc2a4\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc791\uc5c5 \ub0b4\uc6a9\uc744 \uc800\uc7a5\ud558\uc9c0 \uc54a\uc544\ub3c4 \uc0c1\uad00\uc5c6\uc744 \ub54c\uc5d0\ub9cc \ud074\ub9ad\ud569\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"li"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ub204\ub974\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.")),(0,r.kt)("li",{parentName:"ul"},'\uae30\uc874\uc5d0 \ubbf8\ub9ac \uc0dd\uc131\ud574\ub450\uc5c8\ub358 PVC\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \ub54c\uc5d0\ub294, Type\uc744 "Existing" \uc73c\ub85c \uc785\ub825\ud558\uc5ec \ud574\ub2f9 PVC\uc758 \uc774\ub984\uc744 \uc785\ub825\ud558\uc5ec \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.'))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Data Volumes"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \uc2a4\ud1a0\ub9ac\uc9c0 \uc790\uc6d0\uc774 \ud544\uc694\ud558\ub2e4\uba74 ",(0,r.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ud544\uc694\ud558\uc9c0 \uc54a\uc73c\ubbc0\ub85c ",(0,r.kt)("em",{parentName:"li"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \uc124\uba85\uc744 \uc0dd\ub7b5\ud569\ub2c8\ub2e4."))))),(0,r.kt)("p",null,"\ubaa8\ub450 \uc815\uc0c1\uc801\uc73c\ub85c \uc785\ub825\ud558\uc600\ub2e4\uba74 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},"LAUNCH")," \ubc84\ud2bc\uc774 \ud65c\uc131\ud654\ub418\uba70, \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131\uc774 \uc2dc\uc791\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating",src:a(8290).Z,width:"1928",height:"400"})),(0,r.kt)("p",null,"\uc0dd\uc131 \ud6c4 \uc544\ub798\uc640 \uac19\uc774 ",(0,r.kt)("strong",{parentName:"p"},"Status")," \uac00 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc \uc544\uc774\ucf58\uc73c\ub85c \ubcc0\ud558\uba70, ",(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc774 
\ud65c\uc131\ud654\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"created",src:a(2519).Z,width:"1852",height:"352"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc744 \ud074\ub9ad\ud558\uba74 \ube0c\ub77c\uc6b0\uc800\uc5d0 \uc0c8 \ucc3d\uc774 \uc5f4\ub9ac\uba70, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-access",src:a(5675).Z,width:"2898",height:"1990"})),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Launcher"),"\uc758 Notebook, Console, Terminal \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Notebook \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-console",src:a(4831).Z,width:"2850",height:"736"})),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Terminal \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"terminal-console",src:a(4155).Z,width:"2834",height:"806"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30"),(0,r.kt)("p",null,"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc624\ub79c \uc2dc\uac04 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc758 \ud6a8\uc728\uc801\uc778 \ub9ac\uc18c\uc2a4 \uc0ac\uc6a9\uc744 \uc704\ud574\uc11c \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc911\ub2e8(Stop)\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ub2e8, \uc774 \uacbd\uc6b0 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \uc2dc Workspace Volume \ub610\ub294 Data Volume\uc73c\ub85c \uc9c0\uc815\ud574\ub193\uc740 \uacbd\ub85c \uc678\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \ucd08\uae30\ud654\ub418\ub294 \uac83\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("br",{parentName:"p"}),"\n","\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \ub2f9\uc2dc \uacbd\ub85c\ub97c \ubcc0\uacbd\ud558\uc9c0 \uc54a\uc558\ub2e4\uba74, \ub514\ud3f4\ud2b8(Default) Workspace Volume\uc758 \uacbd\ub85c\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc774\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc758 \ud558\uc704 \uacbd\ub85c \uc774\uc678\uc758 \uacbd\ub85c\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \uc0ac\ub77c\uc9d1\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"STOP")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84\uac00 \uc911\ub2e8\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-stop",src:a(1970).Z,width:"1832",height:"1014"})),(0,r.kt)("p",null,"\uc911\ub2e8\uc774 \uc644\ub8cc\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"CONNECT")," \ubc84\ud2bc\uc774 \ube44\ud65c\uc131\ud654\ub418\uba70, ",(0,r.kt)("inlineCode",{parentName:"p"},"PLAY")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc2dc \uc815\uc0c1\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 
\uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-restart",src:a(8586).Z,width:"1888",height:"932"})))}m.isMDXComponent=!0},4164:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},2519:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},8290:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},7511:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},8666:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},5675:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},4831:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},1288:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},8586:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},1970:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},4155:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3457],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=n.createContext({}),s=function(e){var t=n.useContext(p),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=s(e.components);return n.createElement(p.Provider,{value:t},e.children)},k="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},c=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),k=s(a),c=r,d=k["".concat(p,".").concat(c)]||k[c]||m[c]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=c;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[k]="string"==typeof e?e:r,l[1]=i;for(var s=2;s{a.r(t),a.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const o={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},l=void 0,i={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"kubeflow-dashboard-guide/notebooks",title:"2. 
Notebooks",description:"",source:"@site/docs/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/docs/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards"}},p={},s=[{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30",level:2},{value:"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30",id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30",level:2}],u={toc:s},k="wrapper";function m(e){let{components:t,...o}=e;return(0,r.kt)(k,(0,n.Z)({},u,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84notebook-server-\uc0dd\uc131\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84(Notebook Server) \uc0dd\uc131\ud558\uae30"),(0,r.kt)("p",null,"\ub2e4\uc74c Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Notebooks\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:a(7511).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"Notebooks \ud0ed\uc740 JupyterHub\uc640 \ube44\uc2b7\ud558\uac8c \uc720\uc800\ubcc4\ub85c jupyter notebook \ubc0f code server \ud658\uacbd(\uc774\ud558 \ub178\ud2b8\ubd81 \uc11c\ubc84)\uc744 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc0dd\uc131\ud558\uace0 \uc811\uc18d\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-home",src:a(1288).Z,width:"5008",height:"2682"})),(0,r.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW NOTEBOOK")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-notebook",src:a(8666).Z,width:"1900",height:"312"})),(0,r.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \ud654\uba74\uc774 \ub098\ud0c0\ub098\uba74, \uc774\uc81c \uc0dd\uc131\ud560 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc2a4\ud399(Spec)\uc744 \uba85\uc2dc\ud558\uc5ec \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"create",src:a(4164).Z,width:"1738",height:"1674"})),(0,r.kt)("details",null,(0,r.kt)("summary",null,"\uac01 \uc2a4\ud399\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"name"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uad6c\ubd84\ud560 \uc218 \uc788\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ud569\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"namespace")," 
:",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub530\ub85c \ubcc0\uacbd\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. (\ud604\uc7ac \ub85c\uadf8\uc778\ud55c user \uacc4\uc815\uc758 namespace\uc774 \uc790\ub3d9\uc73c\ub85c \uc9c0\uc815\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.)"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Image"),":",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"sklearn, pytorch, tensorflow \ub4f1\uc758 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0\uac00 \ubbf8\ub9ac \uc124\uce58\ub41c jupyter lab \uc774\ubbf8\uc9c0 \uc911 \uc0ac\uc6a9\ud560 \uc774\ubbf8\uc9c0\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c GPU\ub97c \uc0ac\uc6a9\ud558\uc5ec tensorflow-cuda, pytorch-cuda \ub4f1\uc758 \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, ",(0,r.kt)("strong",{parentName:"li"},"\ud558\ub2e8\uc758 GPUs")," \ubd80\ubd84\uc744 \ud655\uc778\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \ud328\ud0a4\uc9c0\ub098 \uc18c\uc2a4\ucf54\ub4dc \ub4f1\uc744 \ud3ec\ud568\ud55c \ucee4\uc2a4\ud140(Custom) \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ucee4\uc2a4\ud140 \uc774\ubbf8\uc9c0(Custom Image)\ub97c \ub9cc\ub4e4\uace0 \ubc30\ud3ec \ud6c4 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"CPU / RAM"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ud544\uc694\ud55c \uc790\uc6d0 \uc0ac\uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"cpu : core \ub2e8\uc704",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uac00\uc0c1 core \uac1c\uc218 \ub2e8\uc704\ub97c \uc758\ubbf8\ud558\uba70, int \ud615\uc2dd\uc774 \uc544\ub2cc ",(0,r.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,r.kt)("inlineCode",{parentName:"li"},"2.7")," \ub4f1\uc758 float \ud615\uc2dd\ub3c4 \uc785\ub825\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},"memory : Gi \ub2e8\uc704"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"GPUs"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc5d0 \ud560\ub2f9\ud560 GPU \uac1c\uc218\ub97c \uc785\ub825\ud569\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"None"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU \uc790\uc6d0\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc740 \uc0c1\ud669"))),(0,r.kt)("li",{parentName:"ul"},"1, 2, 4",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"GPU 1, 2, 4 \uac1c \ud560\ub2f9"))))),(0,r.kt)("li",{parentName:"ul"},"GPU Vendor",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc55e\uc758 ",(0,r.kt)("a",{parentName:"li",href:"/docs/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," \ub97c \ub530\ub77c nvidia gpu plugin\uc744 \uc124\uce58\ud558\uc600\ub2e4\uba74 NVIDIA\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Workspace Volume"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc5d0\uc11c \ud544\uc694\ud55c \ub9cc\ud07c\uc758 \ub514\uc2a4\ud06c 
\uc6a9\ub7c9\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,r.kt)("li",{parentName:"ul"},"Type \uacfc Name \uc740 \ubcc0\uacbd\ud558\uc9c0 \uc54a\uace0, ",(0,r.kt)("strong",{parentName:"li"},"\ub514\uc2a4\ud06c \uc6a9\ub7c9\uc744 \ub298\ub9ac\uace0 \uc2f6\uac70\ub098")," ",(0,r.kt)("strong",{parentName:"li"},"AccessMode \ub97c \ubcc0\uacbd\ud558\uace0 \uc2f6\uc744")," \ub54c\uc5d0\ub9cc \ubcc0\uacbd\ud574\uc11c \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," \uccb4\ud06c\ubc15\uc2a4\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84\uc758 \uc791\uc5c5 \ub0b4\uc6a9\uc744 \uc800\uc7a5\ud558\uc9c0 \uc54a\uc544\ub3c4 \uc0c1\uad00\uc5c6\uc744 \ub54c\uc5d0\ub9cc \ud074\ub9ad\ud569\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"li"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ub204\ub974\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.")),(0,r.kt)("li",{parentName:"ul"},'\uae30\uc874\uc5d0 \ubbf8\ub9ac \uc0dd\uc131\ud574\ub450\uc5c8\ub358 PVC\ub97c \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \ub54c\uc5d0\ub294, Type\uc744 "Existing" \uc73c\ub85c \uc785\ub825\ud558\uc5ec \ud574\ub2f9 PVC\uc758 \uc774\ub984\uc744 \uc785\ub825\ud558\uc5ec \uc0ac\uc6a9\ud558\uc2dc\uba74 \ub429\ub2c8\ub2e4.'))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Data Volumes"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\ucd94\uac00\uc801\uc778 \uc2a4\ud1a0\ub9ac\uc9c0 \uc790\uc6d0\uc774 \ud544\uc694\ud558\ub2e4\uba74 ",(0,r.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc73c\ub85c\ub294 \ud544\uc694\ud558\uc9c0 \uc54a\uc73c\ubbc0\ub85c ",(0,r.kt)("em",{parentName:"li"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \uc124\uba85\uc744 \uc0dd\ub7b5\ud569\ub2c8\ub2e4."))))),(0,r.kt)("p",null,"\ubaa8\ub450 \uc815\uc0c1\uc801\uc73c\ub85c \uc785\ub825\ud558\uc600\ub2e4\uba74 \ud558\ub2e8\uc758 ",(0,r.kt)("strong",{parentName:"p"},"LAUNCH")," \ubc84\ud2bc\uc774 \ud65c\uc131\ud654\ub418\uba70, \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131\uc774 \uc2dc\uc791\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating",src:a(8290).Z,width:"1928",height:"400"})),(0,r.kt)("p",null,"\uc0dd\uc131 \ud6c4 \uc544\ub798\uc640 \uac19\uc774 ",(0,r.kt)("strong",{parentName:"p"},"Status")," \uac00 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc \uc544\uc774\ucf58\uc73c\ub85c \ubcc0\ud558\uba70, ",(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc774 \ud65c\uc131\ud654\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"created",src:a(2519).Z,width:"1852",height:"352"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc811\uc18d\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc811\uc18d\ud558\uae30"),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"CONNECT \ubc84\ud2bc"),"\uc744 \ud074\ub9ad\ud558\uba74 \ube0c\ub77c\uc6b0\uc800\uc5d0 \uc0c8 \ucc3d\uc774 \uc5f4\ub9ac\uba70, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 
\ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-access",src:a(5675).Z,width:"2898",height:"1990"})),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"Launcher"),"\uc758 Notebook, Console, Terminal \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Notebook \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-console",src:a(4831).Z,width:"2850",height:"736"})),(0,r.kt)("p",null," \uc0dd\uc131\ub41c Terminal \ud654\uba74"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"terminal-console",src:a(4155).Z,width:"2834",height:"806"})),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"\ub178\ud2b8\ubd81-\uc11c\ubc84-\uc911\ub2e8\ud558\uae30"},"\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc911\ub2e8\ud558\uae30"),(0,r.kt)("p",null,"\ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc624\ub79c \uc2dc\uac04 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uacbd\uc6b0, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc758 \ud6a8\uc728\uc801\uc778 \ub9ac\uc18c\uc2a4 \uc0ac\uc6a9\uc744 \uc704\ud574\uc11c \ub178\ud2b8\ubd81 \uc11c\ubc84\ub97c \uc911\ub2e8(Stop)\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ub2e8, \uc774 \uacbd\uc6b0 \ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \uc2dc Workspace Volume \ub610\ub294 Data Volume\uc73c\ub85c \uc9c0\uc815\ud574\ub193\uc740 \uacbd\ub85c \uc678\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \ucd08\uae30\ud654\ub418\ub294 \uac83\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("br",{parentName:"p"}),"\n","\ub178\ud2b8\ubd81 \uc11c\ubc84 \uc0dd\uc131 \ub2f9\uc2dc \uacbd\ub85c\ub97c \ubcc0\uacbd\ud558\uc9c0 \uc54a\uc558\ub2e4\uba74, \ub514\ud3f4\ud2b8(Default) Workspace Volume\uc758 \uacbd\ub85c\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ub0b4\uc758 ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc774\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," \uc758 \ud558\uc704 \uacbd\ub85c \uc774\uc678\uc758 \uacbd\ub85c\uc5d0 \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 \ubaa8\ub450 \uc0ac\ub77c\uc9d1\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"STOP")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub178\ud2b8\ubd81 \uc11c\ubc84\uac00 \uc911\ub2e8\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-stop",src:a(1970).Z,width:"1832",height:"1014"})),(0,r.kt)("p",null,"\uc911\ub2e8\uc774 \uc644\ub8cc\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,r.kt)("inlineCode",{parentName:"p"},"CONNECT")," \ubc84\ud2bc\uc774 \ube44\ud65c\uc131\ud654\ub418\uba70, ",(0,r.kt)("inlineCode",{parentName:"p"},"PLAY")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc2dc \uc815\uc0c1\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"notebook-restart",src:a(8586).Z,width:"1888",height:"932"})))}m.isMDXComponent=!0},4164:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},2519:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},8290:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},7511:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},8666:(e,t,a)=>{a.d(t,{Z:()=>n});const 
n=a.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},5675:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},4831:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},1288:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},8586:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},1970:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},4155:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file diff --git a/assets/js/b0207dc0.2416ce84.js b/assets/js/b0207dc0.2ce11a4d.js similarity index 98% rename from assets/js/b0207dc0.2416ce84.js rename to assets/js/b0207dc0.2ce11a4d.js index 97048c7b..5c5b6bc7 100644 --- a/assets/js/b0207dc0.2416ce84.js +++ b/assets/js/b0207dc0.2ce11a4d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5597],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),m=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=m(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,p=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),c=m(n),d=a,k=c["".concat(p,".").concat(d)]||c[d]||s[d]||o;return n?r.createElement(k,l(l({ref:t},u),{},{components:n})):r.createElement(k,l({ref:t},u))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var m=2;m{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>s,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var r=n(7462),a=(n(7294),n(3905));const o={title:"How to Contribute",sidebar_position:2},l=void 0,i={unversionedId:"how-to-contribute",id:"how-to-contribute",title:"How to Contribute",description:"How to Start",source:"@site/community/how-to-contribute.md",sourceDirName:".",slug:"/how-to-contribute",permalink:"/community/how-to-contribute",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/how-to-contribute.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"How to 
Contribute",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Community",permalink:"/community/community"},next:{title:"Contributors",permalink:"/community/contributors"}},p={},m=[{value:"How to Start",id:"how-to-start",level:2},{value:"Git Repo \uc900\ube44",id:"git-repo-\uc900\ube44",level:3},{value:"\ud658\uacbd \uc124\uc815",id:"\ud658\uacbd-\uc124\uc815",level:3},{value:"How to Contribute",id:"how-to-contribute",level:2},{value:"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c",id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c",level:3},{value:"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c",id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c",level:3},{value:"3. \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c",id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c",level:3},{value:"After Pull Request",id:"after-pull-request",level:2}],u={toc:m},c="wrapper";function s(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"how-to-start"},"How to Start"),(0,a.kt)("h3",{id:"git-repo-\uc900\ube44"},"Git Repo \uc900\ube44"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io"},(0,a.kt)("em",{parentName:"a"},"\ubaa8\ub450\uc758 MLOps")," GitHub Repository"),"\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc5ec\ub7ec\ubd84\uc758 \uac1c\uc778 Repository\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Fork"),"\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Forked Repository\ub97c \uc5ec\ub7ec\ubd84\uc758 \uc791\uc5c5 \ud658\uacbd\uc73c\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"git clone"),"\ud569\ub2c8\ub2e4."))),(0,a.kt)("h3",{id:"\ud658\uacbd-\uc124\uc815"},"\ud658\uacbd \uc124\uc815"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ubaa8\ub450\uc758 MLOps\ub294 Hugo \uc640 Node\ub97c \uc774\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"li"}),"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"node & npm"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm --version\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"hugo"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hugo version\n")))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud544\uc694\ud55c node module\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm install\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uac01 \uae00\uc758 \uc77c\uad00\uc131\uc744 \uc704\ud574\uc11c \uc5ec\ub7ec markdown lint\ub97c \uc801\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 test\ub97c \uc9c4\ud589\ud55c \ud6c4 \ucee4\ubc0b\ud569\ub2c8\ub2e4.\ub0b4\uc6a9 \uc218\uc815 \ubc0f \ucd94\uac00 
\ud6c4 lint\uac00 \ub9de\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm test\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"lint \ud655\uc778 \uc644\ub8cc \ud6c4 ci \ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm ci\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ub85c\uceec\uc5d0\uc11c \uc2e4\ud589 \ud6c4 \uc218\uc815\ud55c \uae00\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub098\uc624\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm run start\n")))),(0,a.kt)("h2",{id:"how-to-contribute"},"How to Contribute"),(0,a.kt)("h3",{id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c"},"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c"),(0,a.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub294 \uac01 \ucc55\ud130\uc640 \ud3ec\uc2a4\ud2b8\uc758 \uc704\uce58\uc5d0 \ub9de\ub294 weight\ub97c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Introduction: 1xx"),(0,a.kt)("li",{parentName:"ul"},"Setup: 2xx"),(0,a.kt)("li",{parentName:"ul"},"Kubeflow: 3xx"),(0,a.kt)("li",{parentName:"ul"},"API Deployment: 4xx"),(0,a.kt)("li",{parentName:"ul"},"Help: 10xx")),(0,a.kt)("h3",{id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c"},"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c"),(0,a.kt)("p",null,"\uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c Contributor\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'contributors: ["John Doe", "Adam Smith"]\n')),(0,a.kt)("h3",{id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c"},"3. 
\ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c"),(0,a.kt)("p",null,"\ub9cc\uc57d \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec \ud560 \ub54c ",(0,a.kt)("inlineCode",{parentName:"p"},"content/kor/contributors"),"\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc73c\ub85c \ud3f4\ub354\ub97c \uc0dd\uc131\ud55c \ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ub77c\ub294 \ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo kim"),"\uc774 \ubcf8\uc778\uc758 \uc601\uc5b4 \uc774\ub984\uc774\ub77c\uba74, \ud3f4\ub354\uba85\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo-kim"),"\uc73c\ub85c \ud558\uc5ec \ud574\ub2f9 \ud3f4\ub354 \ub0b4\ubd80\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ud30c\uc77c\uc5d0 \ub2e4\uc74c\uc758 \ub0b4\uc6a9\uc744 \uc791\uc131\ud569\ub2c8\ub2e4.\n\ud3f4\ub354\uba85\uc740 \ud558\uc774\ud508(-)\uc73c\ub85c \uc5f0\uacb0\ud55c \uc18c\ubb38\uc790\ub85c, title\uc740 \ub744\uc5b4\uc4f0\uae30\ub97c \ud3ec\ud568\ud55c CamelCase\ub85c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'---\ntitle: "John Doe"\ndraft: false\n---\n')),(0,a.kt)("h2",{id:"after-pull-request"},"After Pull Request"),(0,a.kt)("p",null,"Pull Request\ub97c \uc0dd\uc131\ud558\uba74 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uc790\ub3d9\uc73c\ub85c ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc6b4\uc601\uc9c4\uc5d0\uac8c \ub9ac\ubdf0 \uc694\uccad\uc774 \uc804\ud574\uc9d1\ub2c8\ub2e4. \ucd5c\ub300 \uc77c\uc8fc\uc77c \uc774\ub0b4\ub85c \ud655\uc778 \ud6c4 Comment\ub97c \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4."))}s.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5597],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),m=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=m(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,p=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),c=m(n),d=a,k=c["".concat(p,".").concat(d)]||c[d]||s[d]||o;return n?r.createElement(k,l(l({ref:t},u),{},{components:n})):r.createElement(k,l({ref:t},u))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var m=2;m{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>s,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var r=n(7462),a=(n(7294),n(3905));const o={title:"How to 
Contribute",sidebar_position:2},l=void 0,i={unversionedId:"how-to-contribute",id:"how-to-contribute",title:"How to Contribute",description:"How to Start",source:"@site/community/how-to-contribute.md",sourceDirName:".",slug:"/how-to-contribute",permalink:"/community/how-to-contribute",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/how-to-contribute.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"How to Contribute",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Community",permalink:"/community/community"},next:{title:"Contributors",permalink:"/community/contributors"}},p={},m=[{value:"How to Start",id:"how-to-start",level:2},{value:"Git Repo \uc900\ube44",id:"git-repo-\uc900\ube44",level:3},{value:"\ud658\uacbd \uc124\uc815",id:"\ud658\uacbd-\uc124\uc815",level:3},{value:"How to Contribute",id:"how-to-contribute",level:2},{value:"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c",id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c",level:3},{value:"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c",id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c",level:3},{value:"3. \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c",id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c",level:3},{value:"After Pull Request",id:"after-pull-request",level:2}],u={toc:m},c="wrapper";function s(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"how-to-start"},"How to Start"),(0,a.kt)("h3",{id:"git-repo-\uc900\ube44"},"Git Repo \uc900\ube44"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io"},(0,a.kt)("em",{parentName:"a"},"\ubaa8\ub450\uc758 MLOps")," GitHub Repository"),"\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc5ec\ub7ec\ubd84\uc758 \uac1c\uc778 Repository\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Fork"),"\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Forked Repository\ub97c \uc5ec\ub7ec\ubd84\uc758 \uc791\uc5c5 \ud658\uacbd\uc73c\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"git clone"),"\ud569\ub2c8\ub2e4."))),(0,a.kt)("h3",{id:"\ud658\uacbd-\uc124\uc815"},"\ud658\uacbd \uc124\uc815"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ubaa8\ub450\uc758 MLOps\ub294 Hugo \uc640 Node\ub97c \uc774\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"li"}),"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"node & npm"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm --version\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"hugo"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hugo version\n")))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud544\uc694\ud55c node module\uc744 
\uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm install\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uac01 \uae00\uc758 \uc77c\uad00\uc131\uc744 \uc704\ud574\uc11c \uc5ec\ub7ec markdown lint\ub97c \uc801\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 test\ub97c \uc9c4\ud589\ud55c \ud6c4 \ucee4\ubc0b\ud569\ub2c8\ub2e4.\ub0b4\uc6a9 \uc218\uc815 \ubc0f \ucd94\uac00 \ud6c4 lint\uac00 \ub9de\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm test\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"lint \ud655\uc778 \uc644\ub8cc \ud6c4 ci \ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm ci\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ub85c\uceec\uc5d0\uc11c \uc2e4\ud589 \ud6c4 \uc218\uc815\ud55c \uae00\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub098\uc624\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm run start\n")))),(0,a.kt)("h2",{id:"how-to-contribute"},"How to Contribute"),(0,a.kt)("h3",{id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c"},"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c"),(0,a.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub294 \uac01 \ucc55\ud130\uc640 \ud3ec\uc2a4\ud2b8\uc758 \uc704\uce58\uc5d0 \ub9de\ub294 weight\ub97c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Introduction: 1xx"),(0,a.kt)("li",{parentName:"ul"},"Setup: 2xx"),(0,a.kt)("li",{parentName:"ul"},"Kubeflow: 3xx"),(0,a.kt)("li",{parentName:"ul"},"API Deployment: 4xx"),(0,a.kt)("li",{parentName:"ul"},"Help: 10xx")),(0,a.kt)("h3",{id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c"},"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c"),(0,a.kt)("p",null,"\uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c Contributor\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'contributors: ["John Doe", "Adam Smith"]\n')),(0,a.kt)("h3",{id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c"},"3. 
\ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c"),(0,a.kt)("p",null,"\ub9cc\uc57d \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec \ud560 \ub54c ",(0,a.kt)("inlineCode",{parentName:"p"},"content/kor/contributors"),"\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc73c\ub85c \ud3f4\ub354\ub97c \uc0dd\uc131\ud55c \ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ub77c\ub294 \ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo kim"),"\uc774 \ubcf8\uc778\uc758 \uc601\uc5b4 \uc774\ub984\uc774\ub77c\uba74, \ud3f4\ub354\uba85\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo-kim"),"\uc73c\ub85c \ud558\uc5ec \ud574\ub2f9 \ud3f4\ub354 \ub0b4\ubd80\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ud30c\uc77c\uc5d0 \ub2e4\uc74c\uc758 \ub0b4\uc6a9\uc744 \uc791\uc131\ud569\ub2c8\ub2e4.\n\ud3f4\ub354\uba85\uc740 \ud558\uc774\ud508(-)\uc73c\ub85c \uc5f0\uacb0\ud55c \uc18c\ubb38\uc790\ub85c, title\uc740 \ub744\uc5b4\uc4f0\uae30\ub97c \ud3ec\ud568\ud55c CamelCase\ub85c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'---\ntitle: "John Doe"\ndraft: false\n---\n')),(0,a.kt)("h2",{id:"after-pull-request"},"After Pull Request"),(0,a.kt)("p",null,"Pull Request\ub97c \uc0dd\uc131\ud558\uba74 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uc790\ub3d9\uc73c\ub85c ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc6b4\uc601\uc9c4\uc5d0\uac8c \ub9ac\ubdf0 \uc694\uccad\uc774 \uc804\ud574\uc9d1\ub2c8\ub2e4. \ucd5c\ub300 \uc77c\uc8fc\uc77c \uc774\ub0b4\ub85c \ud655\uc778 \ud6c4 Comment\ub97c \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4."))}s.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/b0739f8c.fa9d9f93.js b/assets/js/b0739f8c.bae8b43d.js similarity index 97% rename from assets/js/b0739f8c.fa9d9f93.js rename to assets/js/b0739f8c.bae8b43d.js index 03da9885..87a2a674 100644 --- a/assets/js/b0739f8c.fa9d9f93.js +++ b/assets/js/b0739f8c.bae8b43d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1948],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function a(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),p=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):a(a({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(r),f=o,b=c["".concat(s,".").concat(f)]||c[f]||d[f]||i;return r?n.createElement(b,a(a({ref:t},u),{},{components:r})):n.createElement(b,a({ref:t},u))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,a=new Array(i);a[0]=f;var 
l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const i={title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,l={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"version-1.0/kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline \uad00\ub828",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments"},next:{title:"1. Kubeflow Introduction",permalink:"/docs/1.0/kubeflow/kubeflow-intro"}},s={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,n.Z)({},u,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions \ud398\uc774\uc9c0\ub4e4\uc5d0\uc11c\ub294 Kubeflow Pipeline\uacfc Pipeline\uc758 \uc2e4\ud589 \uadf8\ub9ac\uace0 Pipeline Run\uc758 \uacb0\uacfc\ub97c \uad00\ub9ac\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipeline\uc774 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c Kubeflow\ub97c \uc0ac\uc6a9\ud558\ub294 \uc8fc\ub41c \uc774\uc720\uc774\uba70, Kubeflow Pipeline\uc744 \ub9cc\ub4dc\ub294 \ubc29\ubc95, \uc2e4\ud589\ud558\ub294 \ubc29\ubc95, \uacb0\uacfc\ub97c \ud655\uc778\ud558\ub294 \ubc29\ubc95 \ub4f1 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"\uc5d0\uc11c \ub2e4\ub8f9\ub2c8\ub2e4."))}d.isMDXComponent=!0},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1948],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function a(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var s=n.createContext({}),p=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):a(a({},t),e)),r},u=function(e){var 
t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(r),f=o,b=c["".concat(s,".").concat(f)]||c[f]||d[f]||i;return r?n.createElement(b,a(a({ref:t},u),{},{components:r})):n.createElement(b,a({ref:t},u))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const i={title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,l={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"version-1.0/kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline \uad00\ub828",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline \uad00\ub828",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments"},next:{title:"1. 
Kubeflow Introduction",permalink:"/docs/1.0/kubeflow/kubeflow-intro"}},s={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,n.Z)({},u,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions \ud398\uc774\uc9c0\ub4e4\uc5d0\uc11c\ub294 Kubeflow Pipeline\uacfc Pipeline\uc758 \uc2e4\ud589 \uadf8\ub9ac\uace0 Pipeline Run\uc758 \uacb0\uacfc\ub97c \uad00\ub9ac\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipeline\uc774 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c Kubeflow\ub97c \uc0ac\uc6a9\ud558\ub294 \uc8fc\ub41c \uc774\uc720\uc774\uba70, Kubeflow Pipeline\uc744 \ub9cc\ub4dc\ub294 \ubc29\ubc95, \uc2e4\ud589\ud558\ub294 \ubc29\ubc95, \uacb0\uacfc\ub97c \ud655\uc778\ud558\ub294 \ubc29\ubc95 \ub4f1 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"\uc5d0\uc11c \ub2e4\ub8f9\ub2c8\ub2e4."))}d.isMDXComponent=!0},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/b1ad0a9e.820522ff.js b/assets/js/b1ad0a9e.6ff93fcb.js similarity index 99% rename from assets/js/b1ad0a9e.820522ff.js rename to assets/js/b1ad0a9e.6ff93fcb.js index e44bdc0b..789f4895 100644 --- a/assets/js/b1ad0a9e.820522ff.js +++ b/assets/js/b1ad0a9e.6ff93fcb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[956],{3905:(e,a,t)=>{t.d(a,{Zo:()=>p,kt:()=>b});var r=t(7294);function n(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function o(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);a&&(r=r.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var a=1;a=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var l=r.createContext({}),c=function(e){var a=r.useContext(l),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},p=function(e){var a=c(e.components);return r.createElement(l.Provider,{value:a},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return r.createElement(r.Fragment,{},a)}},k=r.forwardRef((function(e,a){var t=e.components,n=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(t),k=n,b=u["".concat(l,".").concat(k)]||u[k]||d[k]||o;return t?r.createElement(b,i(i({ref:a},p),{},{components:t})):r.createElement(b,i({ref:a},p))}));function b(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var o=t.length,i=new Array(o);i[0]=k;var s={};for(var l in a)hasOwnProperty.call(a,l)&&(s[l]=a[l]);s.originalType=e,s[u]="string"==typeof e?e:n,i[1]=s;for(var c=2;c{t.r(a),t.d(a,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var r=t(7462),n=(t(7294),t(3905));const o={title:"1. 
Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,s={unversionedId:"setup-components/install-components-kf",id:"setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/docs/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/docs/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-kf.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. MLflow Tracking Server",permalink:"/docs/setup-components/install-components-mlflow"}},l={},c=[{value:"\uc124\uce58 \ud30c\uc77c \uc900\ube44",id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44",level:2},{value:"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58",id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"OIDC AuthService",id:"oidc-authservice",level:3},{value:"Kubeflow Namespace",id:"kubeflow-namespace",level:3},{value:"Kubeflow Roles",id:"kubeflow-roles",level:3},{value:"Kubeflow Istio Resources",id:"kubeflow-istio-resources",level:3},{value:"Kubeflow Pipelines",id:"kubeflow-pipelines",level:3},{value:"Katib",id:"katib",level:3},{value:"Central Dashboard",id:"central-dashboard",level:3},{value:"Admission Webhook",id:"admission-webhook",level:3},{value:"Notebooks & Jupyter Web App",id:"notebooks--jupyter-web-app",level:3},{value:"Profiles + KFAM",id:"profiles--kfam",level:3},{value:"Volumes Web App",id:"volumes-web-app",level:3},{value:"Tensorboard & Tensorboard Web App",id:"tensorboard--tensorboard-web-app",level:3},{value:"Training Operator",id:"training-operator",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:c},u="wrapper";function d(e){let{components:a,...o}=e;return(0,n.kt)(u,(0,r.Z)({},p,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44"},"\uc124\uce58 \ud30c\uc77c \uc900\ube44"),(0,n.kt)("p",null,"Kubeflow ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ubc84\uc804\uc744 \uc124\uce58\ud558\uae30 \uc704\ud574\uc11c, \uc124\uce58\uc5d0 \ud544\uc694\ud55c manifests \ud30c\uc77c\ub4e4\uc744 \uc900\ube44\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," \ub97c ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ud0dc\uadf8\ub85c \uae43 \ud074\ub860\ud55c \ub4a4, \ud574\ub2f9 \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 
https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,n.kt)("h2",{id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58"},"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58"),(0,n.kt)("p",null,"kubeflow/manifests Repository \uc5d0 \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58 \ucee4\ub9e8\ub4dc\uac00 \uc801\ud600\uc838 \uc788\uc9c0\ub9cc, \uc124\uce58\ud558\uba70 \ubc1c\uc0dd\ud560 \uc218 \uc788\ub294 \uc774\uc288 \ud639\uc740 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc774 \uc801\ud600\uc838 \uc788\uc9c0 \uc54a\uc544 \ucc98\uc74c \uc124\uce58\ud558\ub294 \uacbd\uc6b0 \uc5b4\ub824\uc6c0\uc744 \uacaa\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c, \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4\ub85c \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc744 \ud568\uaed8 \uc791\uc131\ud569\ub2c8\ub2e4. "),(0,n.kt)("p",null,"\ub610\ud55c, \ubcf8 \ubb38\uc11c\uc5d0\uc11c\ub294 ",(0,n.kt)("strong",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8e8\uc9c0 \uc54a\ub294 \uad6c\uc131\uc694\uc18c\uc778 Knative, KFServing, MPI Operator \uc758 \uc124\uce58\ub294 \ub9ac\uc18c\uc2a4\uc758 \ud6a8\uc728\uc801 \uc0ac\uc6a9\uc744 \uc704\ud574 \ub530\ub85c \uc124\uce58\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"cert-manager \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers 
created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,n.kt)("p",{parentName:"li"},"cert-manager namespace \uc758 3 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"kubeflow-issuer \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created\n")))),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook \uc774\uc288"),(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook deployment \uac00 Running \uc774 \uc544\ub2cc \uacbd\uc6b0, \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud558\uba70 kubeflow-issuer\uac00 \uc124\uce58\ub418\uc9c0 \uc54a\uc744 \uc218 \uc788\uc74c\uc5d0 
\uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c \uacbd\uc6b0, cert-manager \uc758 3\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub418\ub294 \uac83\uc744 \ud655\uc778\ud55c \uc774\ud6c4 \ub2e4\uc2dc \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,n.kt)("h3",{id:"istio"},"Istio"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \uad00\ub828 Custom Resource Definition(CRD) \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio namespace \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply -f 
-\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,n.kt)("p",{parentName:"li"},"istio-system namespace \uc758 2 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,n.kt)("h3",{id:"dex"},"Dex"),(0,n.kt)("p",null,"dex \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex 
created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,n.kt)("p",null,"auth namespace \uc758 1 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,n.kt)("h3",{id:"oidc-authservice"},"OIDC AuthService"),(0,n.kt)("p",null,"OIDC AuthService \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,n.kt)("p",null,"istio-system namespace \uc5d0 authservice-0 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,n.kt)("h3",{id:"kubeflow-namespace"},"Kubeflow Namespace"),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,n.kt)("h3",{id:"kubeflow-roles"},"Kubeflow Roles"),(0,n.kt)("p",null,"kubeflow-roles \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 
\ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 6\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,n.kt)("h3",{id:"kubeflow-istio-resources"},"Kubeflow Istio Resources"),(0,n.kt)("p",null,"kubeflow-istio-resources \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 3\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,n.kt)("p",null,"Kubeflow namespace \uc5d0 gateway \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,n.kt)("h3",{id:"kubeflow-pipelines"},"Kubeflow Pipelines"),(0,n.kt)("p",null,"kubeflow pipelines \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f 
-\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,n.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub294 \uc5ec\ub7ec resources \ub97c \ud55c \ubc88\uc5d0 \uc124\uce58\ud558\uace0 \uc788\uc9c0\ub9cc, \uc124\uce58 \uc21c\uc11c\uc758 \uc758\uc874\uc131\uc774 \uc788\ub294 \ub9ac\uc18c\uc2a4\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub54c\uc5d0 \ub530\ub77c \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,n.kt)("p",null,"\uc704\uc640 \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c\ub2e4\uba74, 10 \ucd08 \uc815\ub3c4 \uae30\ub2e4\ub9b0 \ub4a4 \ub2e4\uc2dc \uc704\uc758 \uba85\ub839\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 16\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 
8888:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"pipeline-ui",src:t(484).Z,width:"2868",height:"970"})),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"localhost \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288")),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"localhost-reject",src:t(9129).Z,width:"626",height:"406"})),(0,n.kt)("p",null,"\ub9cc\uc57d \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"localhost\uc5d0\uc11c \uc5f0\uacb0\uc744 \uac70\ubd80\ud588\uc2b5\ub2c8\ub2e4")," \ub77c\ub294 \uc5d0\ub7ec\uac00 \ucd9c\ub825\ub420 \uacbd\uc6b0, \ucee4\ub9e8\ub4dc\ub85c address \uc124\uc815\uc744 \ud1b5\ud574 \uc811\uadfc\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"\ubcf4\uc548\uc0c1\uc758 \ubb38\uc81c\uac00 \ub418\uc9c0 \uc54a\ub294\ub2e4\uba74,")," \uc544\ub798\uc640 \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"0.0.0.0")," \ub85c \ubaa8\ub4e0 \uc8fc\uc18c\uc758 bind\ub97c \uc5f4\uc5b4\uc8fc\ub294 \ubc29\ud5a5\uc73c\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"\uc704\uc758 \uc635\uc158\uc73c\ub85c \uc2e4\ud589\ud588\uc74c\uc5d0\ub3c4 \uc5ec\uc804\ud788 \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288\uac00 \ubc1c\uc0dd\ud560 \uacbd\uc6b0")),(0,n.kt)("p",null,"\ubc29\ud654\ubcbd \uc124\uc815\uc73c\ub85c \uc811\uc18d\ud574 \ubaa8\ub4e0 tcp \ud504\ub85c\ud1a0\ucf5c\uc758 \ud3ec\ud2b8\uc5d0 \ub300\ud55c \uc811\uc18d\uc744 \ud5c8\uac00 \ub610\ub294 8888\ubc88 \ud3ec\ud2b8\uc758 \uc811\uc18d \ud5c8\uac00\ub97c \ucd94\uac00\ud574 \uc811\uadfc \uad8c\ud55c\uc744 \ud5c8\uac00\ud574\uc90d\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("inlineCode",{parentName:"p"},"http://<\ub2f9\uc2e0\uc758 \uac00\uc0c1 \uc778\uc2a4\ud134\uc2a4 \uacf5\uc778 ip \uc8fc\uc18c>:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud558\uba74, ml-pipeline UI \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ud558\ub2e8\uc5d0\uc11c \uc9c4\ud589\ub418\ub294 \ub2e4\ub978 \ud3ec\ud2b8\uc758 \uacbd\ub85c\uc5d0 \uc811\uc18d\ud560 \ub54c\ub3c4 \uc704\uc758 \uc808\ucc28\uc640 \ub3d9\uc77c\ud558\uac8c \ucee4\ub9e8\ub4dc\ub97c \uc2e4\ud589\ud558\uace0, \ubc29\ud654\ubcbd\uc5d0 \ud3ec\ud2b8 \ubc88\ud638\ub97c \ucd94\uac00\ud574\uc8fc\uba74 \uc2e4\ud589\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"katib"},"Katib"),(0,n.kt)("p",null,"Katib \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c katib UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 8081:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"katib-ui",src:t(5052).Z,width:"2146",height:"620"})),(0,n.kt)("h3",{id:"central-dashboard"},"Central Dashboard"),(0,n.kt)("p",null,"Dashboard \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,n.kt)("p",null,"kubeflow namespace \uc5d0 centraldashboard \uad00\ub828 1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c Central Dashboard UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(3680).Z,width:"4982",height:"1548"})),(0,n.kt)("h3",{id:"admission-webhook"},"Admission Webhook"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,n.kt)("h3",{id:"notebooks--jupyter-web-app"},"Notebooks & Jupyter Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Notebook controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created\nserviceaccount/notebook-controller-service-account created\nrole.rbac.authorization.k8s.io/notebook-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-role created\nrolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created\nconfigmap/notebook-controller-config-m44cmb547t created\nservice/notebook-controller-service created\ndeployment.apps/notebook-controller-deployment created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep notebook-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"notebook-controller-deployment-75b4f7b578-w4d4l 1/1 Running 0 105s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Jupyter Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/jupyter-web-app-service-account created\nrole.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit 
created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created\nrolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created\nclusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep jupyter-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1\uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"jupyter-web-app-deployment-6f744fbc54-p27ts 1/1 Running 0 2m\n")))),(0,n.kt)("h3",{id:"profiles--kfam"},"Profiles + KFAM"),(0,n.kt)("p",null,"Profile Controller\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,n.kt)("h3",{id:"volumes-web-app"},"Volumes Web App"),(0,n.kt)("p",null,"Volumes Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role 
created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,n.kt)("p",null,"1\uac1c\uc758 pod\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,n.kt)("h3",{id:"tensorboard--tensorboard-web-app"},"Tensorboard & Tensorboard Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Web App \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/tensorboards-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created\nconfigmap/tensorboards-web-app-parameters-g28fbd6cch created\nservice/tensorboards-web-app-service created\ndeployment.apps/tensorboards-web-app-deployment created\nvirtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboards-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboards-web-app-deployment-6ff79b7f44-qbzmw 1/1 Running 0 22s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f 
-\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created\nserviceaccount/tensorboard-controller created\nrole.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created\nrolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created\nconfigmap/tensorboard-controller-config-bf88mm96c8 created\nservice/tensorboard-controller-controller-manager-metrics-service created\ndeployment.apps/tensorboard-controller-controller-manager created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboard-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboard-controller-controller-manager-954b7c544-vjpzj 3/3 Running 1 73s\n")))),(0,n.kt)("h3",{id:"training-operator"},"Training Operator"),(0,n.kt)("p",null,"Training Operator \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 
\uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,n.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,n.kt)("p",null,"Kubeflow \uc0ac\uc6a9\uc744 \uc704\ud574, \uc0ac\uc6a9\ud560 User\uc758 Kubeflow Profile \uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,n.kt)("p",null,"kubeflow-user-example-com profile \uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,n.kt)("h2",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,n.kt)("p",null,"Kubeflow central dashboard\uc5d0 web browser\ub85c \uc811\uc18d\ud558\uae30 \uc704\ud574 \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,n.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"login-ui",src:t(1982).Z,width:"2554",height:"1202"})),(0,n.kt)("p",null,"\ub2e4\uc74c \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Email Address: ",(0,n.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,n.kt)("li",{parentName:"ul"},"Password: ",(0,n.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(9150).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},9150:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},3680:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/central-dashboard-ddf80e24ff9066a7e3fdbfd0d58b5721.png"},5052:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/katib-ui-f10efe0ffd3bb57b1de7bdc2ff2aa880.png"},9129:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/localhost-reject-8d0b59ff30048e97d5721f786f25c857.png"},1982:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"},484:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/pipeline-ui-796868a1ebeabfd6d1b6eb9b54c389aa.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[956],{3905:(e,a,t)=>{t.d(a,{Zo:()=>p,kt:()=>b});var r=t(7294);function n(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function o(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);a&&(r=r.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var a=1;a=0||(n[t]=e[t]);return n}(e,a);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(n[t]=e[t])}return n}var l=r.createContext({}),c=function(e){var a=r.useContext(l),t=a;return e&&(t="function"==typeof e?e(a):i(i({},a),e)),t},p=function(e){var a=c(e.components);return r.createElement(l.Provider,{value:a},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var a=e.children;return r.createElement(r.Fragment,{},a)}},k=r.forwardRef((function(e,a){var t=e.components,n=e.mdxType,o=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),u=c(t),k=n,b=u["".concat(l,".").concat(k)]||u[k]||d[k]||o;return t?r.createElement(b,i(i({ref:a},p),{},{components:t})):r.createElement(b,i({ref:a},p))}));function b(e,a){var t=arguments,n=a&&a.mdxType;if("string"==typeof e||n){var o=t.length,i=new Array(o);i[0]=k;var s={};for(var l in a)hasOwnProperty.call(a,l)&&(s[l]=a[l]);s.originalType=e,s[u]="string"==typeof e?e:n,i[1]=s;for(var c=2;c{t.r(a),t.d(a,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>c});var r=t(7462),n=(t(7294),t(3905));const o={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,s={unversionedId:"setup-components/install-components-kf",id:"setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/docs/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/docs/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-kf.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/docs/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. 
MLflow Tracking Server",permalink:"/docs/setup-components/install-components-mlflow"}},l={},c=[{value:"\uc124\uce58 \ud30c\uc77c \uc900\ube44",id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44",level:2},{value:"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58",id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"OIDC AuthService",id:"oidc-authservice",level:3},{value:"Kubeflow Namespace",id:"kubeflow-namespace",level:3},{value:"Kubeflow Roles",id:"kubeflow-roles",level:3},{value:"Kubeflow Istio Resources",id:"kubeflow-istio-resources",level:3},{value:"Kubeflow Pipelines",id:"kubeflow-pipelines",level:3},{value:"Katib",id:"katib",level:3},{value:"Central Dashboard",id:"central-dashboard",level:3},{value:"Admission Webhook",id:"admission-webhook",level:3},{value:"Notebooks & Jupyter Web App",id:"notebooks--jupyter-web-app",level:3},{value:"Profiles + KFAM",id:"profiles--kfam",level:3},{value:"Volumes Web App",id:"volumes-web-app",level:3},{value:"Tensorboard & Tensorboard Web App",id:"tensorboard--tensorboard-web-app",level:3},{value:"Training Operator",id:"training-operator",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:c},u="wrapper";function d(e){let{components:a,...o}=e;return(0,n.kt)(u,(0,r.Z)({},p,o,{components:a,mdxType:"MDXLayout"}),(0,n.kt)("h2",{id:"\uc124\uce58-\ud30c\uc77c-\uc900\ube44"},"\uc124\uce58 \ud30c\uc77c \uc900\ube44"),(0,n.kt)("p",null,"Kubeflow ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ubc84\uc804\uc744 \uc124\uce58\ud558\uae30 \uc704\ud574\uc11c, \uc124\uce58\uc5d0 \ud544\uc694\ud55c manifests \ud30c\uc77c\ub4e4\uc744 \uc900\ube44\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," \ub97c ",(0,n.kt)("strong",{parentName:"p"},"v1.4.0")," \ud0dc\uadf8\ub85c \uae43 \ud074\ub860\ud55c \ub4a4, \ud574\ub2f9 \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,n.kt)("h2",{id:"\uac01-\uad6c\uc131-\uc694\uc18c\ubcc4-\uc124\uce58"},"\uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58"),(0,n.kt)("p",null,"kubeflow/manifests Repository \uc5d0 \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4 \uc124\uce58 \ucee4\ub9e8\ub4dc\uac00 \uc801\ud600\uc838 \uc788\uc9c0\ub9cc, \uc124\uce58\ud558\uba70 \ubc1c\uc0dd\ud560 \uc218 \uc788\ub294 \uc774\uc288 \ud639\uc740 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc774 \uc801\ud600\uc838 \uc788\uc9c0 \uc54a\uc544 \ucc98\uc74c \uc124\uce58\ud558\ub294 \uacbd\uc6b0 \uc5b4\ub824\uc6c0\uc744 \uacaa\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c, \uac01 \uad6c\uc131 \uc694\uc18c\ubcc4\ub85c \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\ub294 \ubc29\ubc95\uc744 \ud568\uaed8 \uc791\uc131\ud569\ub2c8\ub2e4. 
"),(0,n.kt)("p",null,"\ub610\ud55c, \ubcf8 \ubb38\uc11c\uc5d0\uc11c\ub294 ",(0,n.kt)("strong",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8e8\uc9c0 \uc54a\ub294 \uad6c\uc131\uc694\uc18c\uc778 Knative, KFServing, MPI Operator \uc758 \uc124\uce58\ub294 \ub9ac\uc18c\uc2a4\uc758 \ud6a8\uc728\uc801 \uc0ac\uc6a9\uc744 \uc704\ud574 \ub530\ub85c \uc124\uce58\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"cert-manager \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers 
created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,n.kt)("p",{parentName:"li"},"cert-manager namespace \uc758 3 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"kubeflow-issuer \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created\n")))),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook \uc774\uc288"),(0,n.kt)("p",{parentName:"li"},"cert-manager-webhook deployment \uac00 Running \uc774 \uc544\ub2cc \uacbd\uc6b0, \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud558\uba70 kubeflow-issuer\uac00 \uc124\uce58\ub418\uc9c0 \uc54a\uc744 \uc218 \uc788\uc74c\uc5d0 \uc8fc\uc758\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c \uacbd\uc6b0, cert-manager \uc758 3\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub418\ub294 \uac83\uc744 \ud655\uc778\ud55c \uc774\ud6c4 \ub2e4\uc2dc \uba85\ub839\uc5b4\ub97c \uc218\ud589\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection 
refused\n')))),(0,n.kt)("h3",{id:"istio"},"Istio"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \uad00\ub828 Custom Resource Definition(CRD) \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio namespace \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"istio \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system 
created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,n.kt)("p",{parentName:"li"},"istio-system namespace \uc758 2 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,n.kt)("p",{parentName:"li"},"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,n.kt)("h3",{id:"dex"},"Dex"),(0,n.kt)("p",null,"dex \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,n.kt)("p",null,"auth namespace \uc758 1 \uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,n.kt)("h3",{id:"oidc-authservice"},"OIDC AuthService"),(0,n.kt)("p",null,"OIDC AuthService \ub97c 
\uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,n.kt)("p",null,"istio-system namespace \uc5d0 authservice-0 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,n.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,n.kt)("h3",{id:"kubeflow-namespace"},"Kubeflow Namespace"),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,n.kt)("p",null,"kubeflow namespace \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,n.kt)("h3",{id:"kubeflow-roles"},"Kubeflow Roles"),(0,n.kt)("p",null,"kubeflow-roles \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc 
\uac19\uc774 \ucd1d 6\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,n.kt)("h3",{id:"kubeflow-istio-resources"},"Kubeflow Istio Resources"),(0,n.kt)("p",null,"kubeflow-istio-resources \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,n.kt)("p",null,"\ubc29\uae08 \uc0dd\uc131\ud55c kubeflow roles \ub97c \uc870\ud68c\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 3\uac1c\uc758 clusterrole \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,n.kt)("p",null,"Kubeflow namespace \uc5d0 gateway \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,n.kt)("h3",{id:"kubeflow-pipelines"},"Kubeflow Pipelines"),(0,n.kt)("p",null,"kubeflow pipelines \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,n.kt)("p",null,"\uc704 \uba85\ub839\uc5b4\ub294 \uc5ec\ub7ec resources \ub97c \ud55c \ubc88\uc5d0 \uc124\uce58\ud558\uace0 \uc788\uc9c0\ub9cc, 
\uc124\uce58 \uc21c\uc11c\uc758 \uc758\uc874\uc131\uc774 \uc788\ub294 \ub9ac\uc18c\uc2a4\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4.",(0,n.kt)("br",{parentName:"p"}),"\n","\ub530\ub77c\uc11c \ub54c\uc5d0 \ub530\ub77c \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,n.kt)("p",null,"\uc704\uc640 \ube44\uc2b7\ud55c \uc5d0\ub7ec\uac00 \ubc1c\uc0dd\ud55c\ub2e4\uba74, 10 \ucd08 \uc815\ub3c4 \uae30\ub2e4\ub9b0 \ub4a4 \ub2e4\uc2dc \uc704\uc758 \uba85\ub839\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 16\uac1c\uc758 pod \uac00 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"pipeline-ui",src:t(484).Z,width:"2868",height:"970"})),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"localhost \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288")),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"localhost-reject",src:t(9129).Z,width:"626",height:"406"})),(0,n.kt)("p",null,"\ub9cc\uc57d \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"localhost\uc5d0\uc11c \uc5f0\uacb0\uc744 \uac70\ubd80\ud588\uc2b5\ub2c8\ub2e4")," \ub77c\ub294 
\uc5d0\ub7ec\uac00 \ucd9c\ub825\ub420 \uacbd\uc6b0, \ucee4\ub9e8\ub4dc\ub85c address \uc124\uc815\uc744 \ud1b5\ud574 \uc811\uadfc\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("strong",{parentName:"p"},"\ubcf4\uc548\uc0c1\uc758 \ubb38\uc81c\uac00 \ub418\uc9c0 \uc54a\ub294\ub2e4\uba74,")," \uc544\ub798\uc640 \uac19\uc774 ",(0,n.kt)("inlineCode",{parentName:"p"},"0.0.0.0")," \ub85c \ubaa8\ub4e0 \uc8fc\uc18c\uc758 bind\ub97c \uc5f4\uc5b4\uc8fc\ub294 \ubc29\ud5a5\uc73c\ub85c ml-pipeline UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"\uc704\uc758 \uc635\uc158\uc73c\ub85c \uc2e4\ud589\ud588\uc74c\uc5d0\ub3c4 \uc5ec\uc804\ud788 \uc5f0\uacb0 \uac70\ubd80 \uc774\uc288\uac00 \ubc1c\uc0dd\ud560 \uacbd\uc6b0")),(0,n.kt)("p",null,"\ubc29\ud654\ubcbd \uc124\uc815\uc73c\ub85c \uc811\uc18d\ud574 \ubaa8\ub4e0 tcp \ud504\ub85c\ud1a0\ucf5c\uc758 \ud3ec\ud2b8\uc5d0 \ub300\ud55c \uc811\uc18d\uc744 \ud5c8\uac00 \ub610\ub294 8888\ubc88 \ud3ec\ud2b8\uc758 \uc811\uc18d \ud5c8\uac00\ub97c \ucd94\uac00\ud574 \uc811\uadfc \uad8c\ud55c\uc744 \ud5c8\uac00\ud574\uc90d\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("inlineCode",{parentName:"p"},"http://<\ub2f9\uc2e0\uc758 \uac00\uc0c1 \uc778\uc2a4\ud134\uc2a4 \uacf5\uc778 ip \uc8fc\uc18c>:8888/#/pipelines/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud558\uba74, ml-pipeline UI \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ud558\ub2e8\uc5d0\uc11c \uc9c4\ud589\ub418\ub294 \ub2e4\ub978 \ud3ec\ud2b8\uc758 \uacbd\ub85c\uc5d0 \uc811\uc18d\ud560 \ub54c\ub3c4 \uc704\uc758 \uc808\ucc28\uc640 \ub3d9\uc77c\ud558\uac8c \ucee4\ub9e8\ub4dc\ub97c \uc2e4\ud589\ud558\uace0, \ubc29\ud654\ubcbd\uc5d0 \ud3ec\ud2b8 \ubc88\ud638\ub97c \ucd94\uac00\ud574\uc8fc\uba74 \uc2e4\ud589\ud558\ub294 \uac83\uc774 \uac00\ub2a5\ud569\ub2c8\ub2e4."),(0,n.kt)("h3",{id:"katib"},"Katib"),(0,n.kt)("p",null,"Katib \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates 
created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ucd1d 4 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c katib UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 8081:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"katib-ui",src:t(5052).Z,width:"2146",height:"620"})),(0,n.kt)("h3",{id:"central-dashboard"},"Central Dashboard"),(0,n.kt)("p",null,"Dashboard \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,n.kt)("p",null,"kubeflow namespace \uc5d0 centraldashboard \uad00\ub828 1 \uac1c\uc758 pod 
\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,n.kt)("p",null,"\ucd94\uac00\ub85c Central Dashboard UI\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,n.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," \uacbd\ub85c\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(3680).Z,width:"4982",height:"1548"})),(0,n.kt)("h3",{id:"admission-webhook"},"Admission Webhook"),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,n.kt)("h3",{id:"notebooks--jupyter-web-app"},"Notebooks & Jupyter Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Notebook controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 
\ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created\nserviceaccount/notebook-controller-service-account created\nrole.rbac.authorization.k8s.io/notebook-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-role created\nrolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created\nconfigmap/notebook-controller-config-m44cmb547t created\nservice/notebook-controller-service created\ndeployment.apps/notebook-controller-deployment created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep notebook-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"notebook-controller-deployment-75b4f7b578-w4d4l 1/1 Running 0 105s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Jupyter Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/jupyter-web-app-service-account created\nrole.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created\nclusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created\nrolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created\nclusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep jupyter-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1\uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 
\uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"jupyter-web-app-deployment-6f744fbc54-p27ts 1/1 Running 0 2m\n")))),(0,n.kt)("h3",{id:"profiles--kfam"},"Profiles + KFAM"),(0,n.kt)("p",null,"Profile Controller\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,n.kt)("h3",{id:"volumes-web-app"},"Volumes Web App"),(0,n.kt)("p",null,"Volumes Web App \uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,n.kt)("p",null,"1\uac1c\uc758 pod\uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 
\uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,n.kt)("h3",{id:"tensorboard--tensorboard-web-app"},"Tensorboard & Tensorboard Web App"),(0,n.kt)("ol",null,(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Web App \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/tensorboards-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created\nconfigmap/tensorboards-web-app-parameters-g28fbd6cch created\nservice/tensorboards-web-app-service created\ndeployment.apps/tensorboards-web-app-deployment created\nvirtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboards-web-app\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboards-web-app-deployment-6ff79b7f44-qbzmw 1/1 Running 0 22s\n"))),(0,n.kt)("li",{parentName:"ol"},(0,n.kt)("p",{parentName:"li"},"Tensorboard Controller \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created\nserviceaccount/tensorboard-controller created\nrole.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created\nclusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created\nrolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created\nconfigmap/tensorboard-controller-config-bf88mm96c8 
created\nservice/tensorboard-controller-controller-manager-metrics-service created\ndeployment.apps/tensorboard-controller-controller-manager created\n")),(0,n.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep tensorboard-controller\n")),(0,n.kt)("p",{parentName:"li"},"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",{parentName:"li"},(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"tensorboard-controller-controller-manager-954b7c544-vjpzj 3/3 Running 1 73s\n")))),(0,n.kt)("h3",{id:"training-operator"},"Training Operator"),(0,n.kt)("p",null,"Training Operator \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,n.kt)("p",null,"1 \uac1c\uc758 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,n.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,n.kt)("p",null,"Kubeflow \uc0ac\uc6a9\uc744 \uc704\ud574, \uc0ac\uc6a9\ud560 User\uc758 Kubeflow Profile \uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,n.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,n.kt)("p",null,"kubeflow-user-example-com profile \uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get 
profile\n")),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,n.kt)("h2",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,n.kt)("p",null,"Kubeflow central dashboard\uc5d0 web browser\ub85c \uc811\uc18d\ud558\uae30 \uc704\ud574 \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,n.kt)("pre",null,(0,n.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,n.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,n.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"login-ui",src:t(1982).Z,width:"2554",height:"1202"})),(0,n.kt)("p",null,"\ub2e4\uc74c \uc811\uc18d \uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,n.kt)("ul",null,(0,n.kt)("li",{parentName:"ul"},"Email Address: ",(0,n.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,n.kt)("li",{parentName:"ul"},"Password: ",(0,n.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"central-dashboard",src:t(9150).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},9150:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},3680:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/central-dashboard-ddf80e24ff9066a7e3fdbfd0d58b5721.png"},5052:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/katib-ui-f10efe0ffd3bb57b1de7bdc2ff2aa880.png"},9129:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/localhost-reject-8d0b59ff30048e97d5721f786f25c857.png"},1982:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"},484:(e,a,t)=>{t.d(a,{Z:()=>r});const r=t.p+"assets/images/pipeline-ui-796868a1ebeabfd6d1b6eb9b54c389aa.png"}}]); \ No newline at end of file diff --git a/assets/js/b1f93f8c.0d068ec2.js b/assets/js/b1f93f8c.2e1de0ec.js similarity index 99% rename from assets/js/b1f93f8c.0d068ec2.js rename to assets/js/b1f93f8c.2e1de0ec.js index 803b4a64..7b84765e 100644 --- a/assets/js/b1f93f8c.0d068ec2.js +++ b/assets/js/b1f93f8c.2e1de0ec.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5717],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function u(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):u(u({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var 
n=e.components,a=e.mdxType,l=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,b=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return n?r.createElement(b,u(u({ref:t},p),{},{components:n})):r.createElement(b,u({ref:t},p))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,u=new Array(l);u[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,u[1]=s;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>u,default:()=>k,frontMatter:()=>l,metadata:()=>s,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},u=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc704\ud55c \ub124\ud2b8\uc6cc\ud06c\uc758 \uc124\uc815\uc744 \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function u(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):u(u({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,b=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return n?r.createElement(b,u(u({ref:t},p),{},{components:n})):r.createElement(b,u({ref:t},p))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,u=new Array(l);u[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,u[1]=s;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>u,default:()=>k,frontMatter:()=>l,metadata:()=>s,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},u=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. 
Kubeadm",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc704\ud55c \ub124\ud2b8\uc6cc\ud06c\uc758 \uc124\uc815\uc744 \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{r.d(e,{Zo:()=>o,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function u(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function a(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var u=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):a(a({},e),n)),r},o=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},_="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,u=n.originalType,s=n.parentName,o=i(n,["components","mdxType","originalType","parentName"]),_=m(r),d=p,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||u;return r?t.createElement(b,a(a({ref:e},o),{},{components:r})):t.createElement(b,a({ref:e},o))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var u=r.length,a=new Array(u);a[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[_]="string"==typeof n?n:p,a[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>a,default:()=>l,frontMatter:()=>u,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const u={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},a=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/docs/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/docs/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/docs/kubeflow/basic-component"},next:{title:"6. 
Pipeline - Upload",permalink:"/docs/kubeflow/basic-pipeline-upload"}},s={},m=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],o={toc:m},_="wrapper";function l(n){let{components:e,...u}=n;return(0,p.kt)(_,(0,t.Z)({},o,u,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline"},"Pipeline"),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub418\uc9c0 \uc54a\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uad6c\uc131\uc694\uc18c\ub85c\uc368 \uc2e4\ud589\ub429\ub2c8\ub2e4. \uadf8\ub7ec\ubbc0\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud574 \ubcf4\ub824\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc21c\uc11c\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc22b\uc790\ub97c \uc785\ub825\ubc1b\uace0 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ub450 \uac1c\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc22b\uc790\ub97c \ubc1b\uc544\uc11c \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc788\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub9cc\ub4e4\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"component-set"},"Component Set"),(0,p.kt)("p",null,"\uc6b0\uc120 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,p.kt)("ol",null,(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc785\ub825\ubc1b\uc740 \uac12\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \ub450 \uac1c\uc758 \uc22b\uc790\uc758 \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774 \ucef4\ud3ec\ub10c\ud2b8 \uc5ed\uc2dc \ub450 \uc22b\uc790\uc758 \ud569\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef 
sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,p.kt)("h2",{id:"component-order"},"Component Order"),(0,p.kt)("h3",{id:"define-order"},"Define Order"),(0,p.kt)("p",null,"\ud544\uc694\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uc744 \ub9cc\ub4e4\uc5c8\uc73c\uba74, \ub2e4\uc74c\uc73c\ub85c\ub294 \uc774\ub4e4\uc758 \uc21c\uc11c\ub97c \uc815\uc758\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \ub9cc\ub4e4 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc21c\uc11c\ub97c \uadf8\ub9bc\uc73c\ub85c \ud45c\ud604\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"pipeline-0.png",src:r(3618).Z,width:"586",height:"262"})),(0,p.kt)("h3",{id:"single-output"},"Single Output"),(0,p.kt)("p",null,"\uc774\uc81c \uc774 \uc21c\uc11c\ub97c \ucf54\ub4dc\ub85c \uc62e\uaca8\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. "),(0,p.kt)("p",null,"\uc6b0\uc120 \uc704\uc758 \uadf8\ub9bc\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," \uacfc ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," \ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uace0 \uadf8 \ubc18\ud658 \uac12\uc744 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_2_result")," \uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc800\uc7a5\ub41c ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc758 \ubc18\ud658 \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output")," \ub97c \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"multi-output"},"Multi Output"),(0,p.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e8\uc77c \uac12\ub9cc\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"output"),"\uc744 \uc774\uc6a9\ud574 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \uc5ec\ub7ec \uac1c\uc758 \ubc18\ud658 \uac12\uc774 \uc788\ub2e4\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"outputs"),"\uc5d0 \uc800\uc7a5\uc774 \ub418\uba70 dict \ud0c0\uc785\uc774\uae30\uc5d0 key\ub97c \uc774\uc6a9\ud574 \uc6d0\ud558\ub294 \ubc18\ud658 \uac12\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \uc55e\uc5d0\uc11c \uc791\uc131\ud55c \uc5ec\ub7ec \uac1c\ub97c \ubc18\ud658\ud558\ub294 ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-component#define-a-standalone-python-function"},"\ucef4\ud3ec\ub10c\ud2b8")," \uc758 \uacbd\uc6b0\ub97c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n",(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number")," \uc758 return \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"quotient")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"remainder")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub450 \uac12\uc744 ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")," \uc5d0 \uc804\ub2ec\ud558\ub294 \uc608\uc2dc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,p.kt)("p",null,(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number"),"\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"divided_result"),"\uc5d0 \uc800\uc7a5\ud558\uace0 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]'),", ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"\ub85c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,p.kt)("p",null,"\uc774\uc81c \ub2e4\uc2dc \ubcf8\ub860\uc73c\ub85c \ub3cc\uc544\uc640\uc11c \uc774 \ub450 \uac12\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")," \uc5d0 \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \uac01 \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uc744 \ubaa8\uc544\uc11c \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc815\uc758 \ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,p.kt)("p",null,"\ub9c8\uc9c0\ub9c9\uc73c\ub85c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4. 
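The `divde_and_return_number` component used in the Multi Output example above is only referenced on this page; its definition lives on the linked component page. As a point of reference, a minimal sketch of a multi-output function component in the kfp v1 SDK returns a NamedTuple, which is what makes the `divided_result.outputs["quotient"]` and `divided_result.outputs["remainder"]` lookups possible. The component name below is copied as it appears on this page, and the division-by-2 logic is purely illustrative, not the definition from the original page.

```python
from typing import NamedTuple

from kfp.components import create_component_from_func


@create_component_from_func
def divde_and_return_number(
    number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
    # Run-time imports live inside the function body, because only the
    # function source is shipped into the component container.
    from collections import namedtuple

    # Illustrative logic only: divide by 2 and keep both results.
    quotient, remainder = divmod(number, 2)

    # Returning a named tuple exposes each field under .outputs[<name>]
    # when the component is called inside a pipeline.
    divide_outputs = namedtuple("DivideOutputs", ["quotient", "remainder"])
    return divide_outputs(quotient=quotient, remainder=remainder)
```

With a definition of this shape, the `multi_pipeline` example shown above can read each value by key exactly as written.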
\ubcc0\ud658\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,p.kt)("p",null,"Kubeflow\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 yaml \ud615\uc2dd\uc73c\ub85c\ub9cc \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc5d0 \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc815\ud574\uc9c4 yaml \ud615\uc2dd\uc73c\ub85c \ucef4\ud30c\uc77c(Compile) \ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ucef4\ud30c\uc77c\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("h2",{id:"conclusion"},"Conclusion"),(0,p.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \ubaa8\uc73c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ucef4\ud30c\uc77c\ub41c \uacb0\uacfc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - 
{name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n 
_serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = 
print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}l.isMDXComponent=!0},3618:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6746],{3905:(n,e,r)=>{r.d(e,{Zo:()=>o,kt:()=>b});var t=r(7294);function p(n,e,r){return e in 
n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function u(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function a(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var u=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):a(a({},e),n)),r},o=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},_="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,u=n.originalType,s=n.parentName,o=i(n,["components","mdxType","originalType","parentName"]),_=m(r),d=p,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||u;return r?t.createElement(b,a(a({ref:e},o),{},{components:r})):t.createElement(b,a({ref:e},o))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var u=r.length,a=new Array(u);a[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[_]="string"==typeof n?n:p,a[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>a,default:()=>l,frontMatter:()=>u,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const u={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},a=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/docs/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/docs/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/docs/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/docs/kubeflow/basic-pipeline-upload"}},s={},m=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],o={toc:m},_="wrapper";function l(n){let{components:e,...u}=n;return(0,p.kt)(_,(0,t.Z)({},o,u,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline"},"Pipeline"),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub418\uc9c0 \uc54a\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uad6c\uc131\uc694\uc18c\ub85c\uc368 \uc2e4\ud589\ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ubbc0\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud574 \ubcf4\ub824\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc21c\uc11c\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc22b\uc790\ub97c \uc785\ub825\ubc1b\uace0 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ub450 \uac1c\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc22b\uc790\ub97c \ubc1b\uc544\uc11c \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc788\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub9cc\ub4e4\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"component-set"},"Component Set"),(0,p.kt)("p",null,"\uc6b0\uc120 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,p.kt)("ol",null,(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc785\ub825\ubc1b\uc740 \uac12\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \ub450 \uac1c\uc758 \uc22b\uc790\uc758 \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774 \ucef4\ud3ec\ub10c\ud2b8 \uc5ed\uc2dc \ub450 \uc22b\uc790\uc758 \ud569\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,p.kt)("h2",{id:"component-order"},"Component Order"),(0,p.kt)("h3",{id:"define-order"},"Define Order"),(0,p.kt)("p",null,"\ud544\uc694\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uc744 \ub9cc\ub4e4\uc5c8\uc73c\uba74, \ub2e4\uc74c\uc73c\ub85c\ub294 \uc774\ub4e4\uc758 \uc21c\uc11c\ub97c \uc815\uc758\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \ub9cc\ub4e4 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc21c\uc11c\ub97c \uadf8\ub9bc\uc73c\ub85c \ud45c\ud604\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"pipeline-0.png",src:r(3618).Z,width:"586",height:"262"})),(0,p.kt)("h3",{id:"single-output"},"Single Output"),(0,p.kt)("p",null,"\uc774\uc81c \uc774 \uc21c\uc11c\ub97c \ucf54\ub4dc\ub85c 
\uc62e\uaca8\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. "),(0,p.kt)("p",null,"\uc6b0\uc120 \uc704\uc758 \uadf8\ub9bc\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," \uacfc ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," \ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uace0 \uadf8 \ubc18\ud658 \uac12\uc744 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_2_result")," \uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc800\uc7a5\ub41c ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc758 \ubc18\ud658 \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output")," \ub97c \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"multi-output"},"Multi Output"),(0,p.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e8\uc77c \uac12\ub9cc\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"output"),"\uc744 \uc774\uc6a9\ud574 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \uc5ec\ub7ec \uac1c\uc758 \ubc18\ud658 \uac12\uc774 \uc788\ub2e4\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"outputs"),"\uc5d0 \uc800\uc7a5\uc774 \ub418\uba70 dict \ud0c0\uc785\uc774\uae30\uc5d0 key\ub97c \uc774\uc6a9\ud574 \uc6d0\ud558\ub294 \ubc18\ud658 \uac12\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \uc55e\uc5d0\uc11c \uc791\uc131\ud55c \uc5ec\ub7ec \uac1c\ub97c \ubc18\ud658\ud558\ub294 ",(0,p.kt)("a",{parentName:"p",href:"/docs/kubeflow/basic-component#define-a-standalone-python-function"},"\ucef4\ud3ec\ub10c\ud2b8")," \uc758 \uacbd\uc6b0\ub97c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n",(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number")," \uc758 return \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"quotient")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"remainder")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub450 \uac12\uc744 ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")," \uc5d0 \uc804\ub2ec\ud558\ub294 \uc608\uc2dc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,p.kt)("p",null,(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number"),"\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"divided_result"),"\uc5d0 \uc800\uc7a5\ud558\uace0 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]'),", ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"\ub85c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,p.kt)("p",null,"\uc774\uc81c \ub2e4\uc2dc \ubcf8\ub860\uc73c\ub85c \ub3cc\uc544\uc640\uc11c \uc774 \ub450 \uac12\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")," \uc5d0 \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \uac01 \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uc744 \ubaa8\uc544\uc11c \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc815\uc758 \ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,p.kt)("p",null,"\ub9c8\uc9c0\ub9c9\uc73c\ub85c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4. 
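One detail worth noting about the ordering defined in this pipeline: the execution order is implied entirely by data dependencies, since `sum_and_print_numbers` consumes the `.output` of both `print_and_return_number` steps. When two steps need a fixed order but exchange no data, the kfp v1 DSL also allows declaring the dependency explicitly with `.after()`. A small sketch, reusing the `print_and_return_number` component defined above; the pipeline name and argument values here are placeholders for illustration:

```python
from kfp.dsl import pipeline


@pipeline(name="ordered_pipeline")
def ordered_pipeline(number_1: int, number_2: int):
    first_task = print_and_return_number(number_1)
    second_task = print_and_return_number(number_2)

    # No value flows from first_task to second_task,
    # so the order is stated explicitly instead.
    second_task.after(first_task)
```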
\ubcc0\ud658\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,p.kt)("p",null,"Kubeflow\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 yaml \ud615\uc2dd\uc73c\ub85c\ub9cc \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc5d0 \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc815\ud574\uc9c4 yaml \ud615\uc2dd\uc73c\ub85c \ucef4\ud30c\uc77c(Compile) \ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ucef4\ud30c\uc77c\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("h2",{id:"conclusion"},"Conclusion"),(0,p.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \ubaa8\uc73c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ucef4\ud30c\uc77c\ub41c \uacb0\uacfc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - 
{name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n 
_serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = 
print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}l.isMDXComponent=!0},3618:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file diff --git a/assets/js/b3824f13.eb8c4c60.js b/assets/js/b3824f13.26419845.js similarity index 99% rename from assets/js/b3824f13.eb8c4c60.js rename to assets/js/b3824f13.26419845.js index 74205a48..21f800f9 100644 --- 
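After compiling `example_pipeline.yaml` as shown above, the pipeline still has to be registered with Kubeflow. The following page (6. Pipeline - Upload) continues from here; the same step can also be scripted with the kfp SDK client, sketched below. The host URL and run arguments are placeholders for your own installation, and `example_pipeline` is assumed to be importable from the code above.

```python
import kfp

# Placeholder endpoint: point this at your own Kubeflow Pipelines installation.
client = kfp.Client(host="http://localhost:8080/pipeline")

# Register the compiled package under a pipeline name ...
client.upload_pipeline(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_name="example_pipeline",
)

# ... or skip the manual compile step and create a run straight from the function.
client.create_run_from_pipeline_func(
    example_pipeline,
    arguments={"number_1": 3, "number_2": 4},
)
```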
a/assets/js/b3824f13.eb8c4c60.js +++ b/assets/js/b3824f13.26419845.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4818],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function l(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(l[n]=e[n]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var p=o.createContext({}),c=function(e){var t=o.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,l=e.mdxType,r=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),u=c(n),f=l,m=u["".concat(p,".").concat(f)]||u[f]||d[f]||r;return n?o.createElement(m,i(i({ref:t},s),{},{components:n})):o.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=n.length,i=new Array(r);i[0]=f;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:l,i[1]=a;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>c});var o=n(7462),l=(n(7294),n(3905));const r={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},i=void 0,a={unversionedId:"kubeflow/kubeflow-concepts",id:"kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/docs/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/docs/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-concepts.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/docs/kubeflow/kubeflow-intro"},next:{title:"3. 
Install Requirements",permalink:"/docs/kubeflow/basic-requirements"}},p={},c=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:c},u="wrapper";function d(e){let{components:t,...r}=e;return(0,l.kt)(u,(0,o.Z)({},s,r,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"component"},"Component"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component contents)\uc640 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component wrapper)\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ud1b5\ud574 kubeflow\uc5d0 \uc804\ub2ec\ub418\uba70 \uc804\ub2ec\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589(execute)\ud558\uace0 \uc544\ud2f0\ud329\ud2b8(artifacts)\ub4e4\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-0.png",src:n(3396).Z,width:"1392",height:"704"})),(0,l.kt)("h3",{id:"component-contents"},"Component Contents"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uad6c\uc131\ud558\ub294 \uac83\uc740 \ucd1d 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-1.png",src:n(8482).Z,width:"574",height:"436"})),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Environemnt"),(0,l.kt)("li",{parentName:"ol"},"Python code w\\ Config"),(0,l.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,l.kt)("p",null,"\uc608\uc2dc\uc640 \ud568\uaed8 \uac01 \uad6c\uc131 \uc694\uc18c\uac00 \uc5b4\ub5a4 \uac83\uc778\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub2e4\uc74c\uacfc \uac19\uc774 \ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc640 SVC(Support Vector Classifier)\ub97c \ud559\uc2b5\ud55c \ud6c4 SVC \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc744 \uc801\uc740 \ud30c\uc774\uc36c \ucf54\ub4dc\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,l.kt)("p",null,"\uc704\uc758 \ud30c\uc774\uc36c \ucf54\ub4dc\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub85c \ub098\ub20c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-2.png",src:n(4582).Z,width:"832",height:"410"})),(0,l.kt)("p",null,"Environment\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\ub4e4\uc744 import\ud558\ub294 \ubd80\ubd84\uc785\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uc73c\ub85c Python Code w\\ Config \uc5d0\uc11c\ub294 \uc8fc\uc5b4\uc9c4 Config\ub97c \uc774\uc6a9\ud574 \uc2e4\uc81c\ub85c \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ub9c8\uc9c0\ub9c9\uc73c\ub85c 
\uc544\ud2f0\ud329\ud2b8\ub97c \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0 \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud558\uace0 \uc2e4\ud589\uc2dc\ud0a4\ub294 \uc791\uc5c5\uc744 \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-3.png",src:n(3204).Z,width:"1066",height:"766"})),(0,l.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc704\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv"),"\uc640 \uac19\uc774 \ud568\uc218\uc758 \ud615\ud0dc\ub85c \uc815\uc758\ud569\ub2c8\ub2e4.\n\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \ucf58\ud150\uce20\ub97c \uac10\uc2f8\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-4.png",src:n(4175).Z,width:"464",height:"826"})),(0,l.kt)("h3",{id:"artifacts"},"Artifacts"),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uba85\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc544\ud2f0\ud329\ud2b8(Artifacts)\ub97c \uc0dd\uc131\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. \uc544\ud2f0\ud329\ud2b8\ub780 evaluation result, log \ub4f1 \uc5b4\ub5a4 \ud615\ud0dc\ub85c\ub4e0 \ud30c\uc77c\ub85c \uc0dd\uc131\ub418\ub294 \uac83\uc744 \ud1b5\ud2c0\uc5b4\uc11c \uce6d\ud558\ub294 \uc6a9\uc5b4\uc785\ub2c8\ub2e4.\n\uadf8\uc911 \uc6b0\ub9ac\uac00 \uad00\uc2ec\uc744 \ub450\ub294 \uc720\uc758\ubbf8\ud55c \uac83\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-5.png",src:n(7436).Z,width:"1700",height:"454"})),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"Model"),(0,l.kt)("li",{parentName:"ul"},"Data"),(0,l.kt)("li",{parentName:"ul"},"Metric"),(0,l.kt)("li",{parentName:"ul"},"etc")),(0,l.kt)("h4",{id:"model"},"Model"),(0,l.kt)("p",null,"\uc800\ud76c\ub294 \ubaa8\ub378\uc744 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc758 \ud588\uc2b5\ub2c8\ub2e4."),(0,l.kt)("blockquote",null,(0,l.kt)("p",{parentName:"blockquote"},"\ubaa8\ub378\uc774\ub780 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ud559\uc2b5\ub41c Weights\uc640 Network \uad6c\uc870 \uadf8\ub9ac\uace0 \uc774\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud55c \ud658\uacbd\uc774 \ubaa8\ub450 \ud3ec\ud568\ub41c \ud615\ud0dc")),(0,l.kt)("h4",{id:"data"},"Data"),(0,l.kt)("p",null,"\ub370\uc774\ud130\ub294 \uc804 \ucc98\ub9ac\ub41c \ud53c\ucc98, \ubaa8\ub378\uc758 \uc608\uce21 \uac12 \ub4f1\uc744 \ud3ec\ud568\ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metric"},"Metric"),(0,l.kt)("p",null,"Metric\uc740 \ub3d9\uc801 \uc9c0\ud45c\uc640 \uc815\uc801 \uc9c0\ud45c \ub450 \uac00\uc9c0\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"\ub3d9\uc801 \uc9c0\ud45c\ub780 train loss\uc640 \uac19\uc774 \ud559\uc2b5\uc774 \uc9c4\ud589\ub418\ub294 \uc911 \uc5d0\ud3ed(Epoch)\ub9c8\ub2e4 \uacc4\uc18d\ud574\uc11c \ubcc0\ud654\ud558\ub294 \uac12\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("li",{parentName:"ul"},"\uc815\uc801 \uc9c0\ud45c\ub780 \ud559\uc2b5\uc774 \ub05d\ub09c \ud6c4 \ucd5c\uc885\uc801\uc73c\ub85c \ubaa8\ub378\uc744 \ud3c9\uac00\ud558\ub294 \uc815\ud655\ub3c4 \ub4f1\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\ub97c 
\uc2e4\ud589\uc2dc\ud0a4\ub294 \uc21c\uc11c\ub3c4\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uc21c\uc11c\ub3c4\ub294 \ubc29\ud5a5 \uc21c\ud658\uc774 \uc5c6\ub294 \uadf8\ub798\ud504\ub85c \uc774\ub8e8\uc5b4\uc838 \uc788\uc73c\uba70, \uac04\ub2e8\ud55c \uc870\uac74\ubb38\uc744 \ud3ec\ud568\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-6.png",src:n(9429).Z,width:"1696",height:"746"})),(0,l.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,l.kt)("p",null,"\uc55e\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574\uc11c\ub294 Config\uac00 \ud544\uc694\ud558\ub2e4\uace0 \uc124\uba85\ud588\uc2b5\ub2c8\ub2e4. \ud30c\uc774\ud504\ub77c\uc778\uc744 \uad6c\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 Config \ub4e4\uc744 \ubaa8\uc544 \ub454 \uac83\uc774 \ud30c\uc774\ud504\ub77c\uc778 Config\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-7.png",src:n(4607).Z,width:"1810",height:"432"})),(0,l.kt)("h2",{id:"run"},"Run"),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \ud544\uc694\ub85c \ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\uac00 \uc8fc\uc5b4\uc838\uc57c\uc9c0\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kubeflow\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc744 Run \uc774\ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-8.png",src:n(6818).Z,width:"1810",height:"576"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub418\uba74 \uac01 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\nKubeflow pipeline\uc5d0\uc11c\ub294 Run \ud558\ub098\ub2f9 \uace0\uc720\ud55c ID \ub97c \uc0dd\uc131\ud558\uace0, Run\uc5d0\uc11c \uc0dd\uc131\ub418\ub294 \ubaa8\ub4e0 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-9.png",src:n(7446).Z,width:"1810",height:"592"})),(0,l.kt)("p",null,"\uadf8\ub7ec\uba74 \uc774\uc81c \uc9c1\uc811 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},3396:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},8482:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},4582:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},3204:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},4175:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},7436:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},9429:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},4607:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},6818:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},7446:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file +"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4818],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function l(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(l[n]=e[n]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var p=o.createContext({}),c=function(e){var t=o.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,l=e.mdxType,r=e.originalType,p=e.parentName,s=a(e,["components","mdxType","originalType","parentName"]),u=c(n),f=l,m=u["".concat(p,".").concat(f)]||u[f]||d[f]||r;return n?o.createElement(m,i(i({ref:t},s),{},{components:n})):o.createElement(m,i({ref:t},s))}));function m(e,t){var n=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=n.length,i=new Array(r);i[0]=f;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[u]="string"==typeof e?e:l,i[1]=a;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>a,toc:()=>c});var o=n(7462),l=(n(7294),n(3905));const r={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},i=void 0,a={unversionedId:"kubeflow/kubeflow-concepts",id:"kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/docs/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/docs/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-concepts.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/docs/kubeflow/kubeflow-intro"},next:{title:"3. 
Install Requirements",permalink:"/docs/kubeflow/basic-requirements"}},p={},c=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:c},u="wrapper";function d(e){let{components:t,...r}=e;return(0,l.kt)(u,(0,o.Z)({},s,r,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"component"},"Component"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8(Component)\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20(Component contents)\uc640 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c(Component wrapper)\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \ud1b5\ud574 kubeflow\uc5d0 \uc804\ub2ec\ub418\uba70 \uc804\ub2ec\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc815\uc758\ub41c \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uc2e4\ud589(execute)\ud558\uace0 \uc544\ud2f0\ud329\ud2b8(artifacts)\ub4e4\uc744 \uc0dd\uc0b0\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-0.png",src:n(3396).Z,width:"1392",height:"704"})),(0,l.kt)("h3",{id:"component-contents"},"Component Contents"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub97c \uad6c\uc131\ud558\ub294 \uac83\uc740 \ucd1d 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-1.png",src:n(8482).Z,width:"574",height:"436"})),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Environemnt"),(0,l.kt)("li",{parentName:"ol"},"Python code w\\ Config"),(0,l.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,l.kt)("p",null,"\uc608\uc2dc\uc640 \ud568\uaed8 \uac01 \uad6c\uc131 \uc694\uc18c\uac00 \uc5b4\ub5a4 \uac83\uc778\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub2e4\uc74c\uacfc \uac19\uc774 \ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc640 SVC(Support Vector Classifier)\ub97c \ud559\uc2b5\ud55c \ud6c4 SVC \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc744 \uc801\uc740 \ud30c\uc774\uc36c \ucf54\ub4dc\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,l.kt)("p",null,"\uc704\uc758 \ud30c\uc774\uc36c \ucf54\ub4dc\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\ub85c \ub098\ub20c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-2.png",src:n(4582).Z,width:"832",height:"410"})),(0,l.kt)("p",null,"Environment\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc\uc5d0\uc11c \uc0ac\uc6a9\ud558\ub294 \ud328\ud0a4\uc9c0\ub4e4\uc744 import\ud558\ub294 \ubd80\ubd84\uc785\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c\uc73c\ub85c Python Code w\\ Config \uc5d0\uc11c\ub294 \uc8fc\uc5b4\uc9c4 Config\ub97c \uc774\uc6a9\ud574 \uc2e4\uc81c\ub85c \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ub9c8\uc9c0\ub9c9\uc73c\ub85c 
\uc544\ud2f0\ud329\ud2b8\ub97c \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,l.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ucf58\ud150\uce20\uc5d0 \ud544\uc694\ud55c Config\ub97c \uc804\ub2ec\ud558\uace0 \uc2e4\ud589\uc2dc\ud0a4\ub294 \uc791\uc5c5\uc744 \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-3.png",src:n(3204).Z,width:"1066",height:"766"})),(0,l.kt)("p",null,"Kubeflow\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\ub97c \uc704\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv"),"\uc640 \uac19\uc774 \ud568\uc218\uc758 \ud615\ud0dc\ub85c \uc815\uc758\ud569\ub2c8\ub2e4.\n\ucef4\ud3ec\ub10c\ud2b8 \ub798\ud37c\uac00 \ucf58\ud150\uce20\ub97c \uac10\uc2f8\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-4.png",src:n(4175).Z,width:"464",height:"826"})),(0,l.kt)("h3",{id:"artifacts"},"Artifacts"),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uba85\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc544\ud2f0\ud329\ud2b8(Artifacts)\ub97c \uc0dd\uc131\ud55c\ub2e4\uace0 \ud588\uc2b5\ub2c8\ub2e4. \uc544\ud2f0\ud329\ud2b8\ub780 evaluation result, log \ub4f1 \uc5b4\ub5a4 \ud615\ud0dc\ub85c\ub4e0 \ud30c\uc77c\ub85c \uc0dd\uc131\ub418\ub294 \uac83\uc744 \ud1b5\ud2c0\uc5b4\uc11c \uce6d\ud558\ub294 \uc6a9\uc5b4\uc785\ub2c8\ub2e4.\n\uadf8\uc911 \uc6b0\ub9ac\uac00 \uad00\uc2ec\uc744 \ub450\ub294 \uc720\uc758\ubbf8\ud55c \uac83\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-5.png",src:n(7436).Z,width:"1700",height:"454"})),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"Model"),(0,l.kt)("li",{parentName:"ul"},"Data"),(0,l.kt)("li",{parentName:"ul"},"Metric"),(0,l.kt)("li",{parentName:"ul"},"etc")),(0,l.kt)("h4",{id:"model"},"Model"),(0,l.kt)("p",null,"\uc800\ud76c\ub294 \ubaa8\ub378\uc744 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc758 \ud588\uc2b5\ub2c8\ub2e4."),(0,l.kt)("blockquote",null,(0,l.kt)("p",{parentName:"blockquote"},"\ubaa8\ub378\uc774\ub780 \ud30c\uc774\uc36c \ucf54\ub4dc\uc640 \ud559\uc2b5\ub41c Weights\uc640 Network \uad6c\uc870 \uadf8\ub9ac\uace0 \uc774\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud55c \ud658\uacbd\uc774 \ubaa8\ub450 \ud3ec\ud568\ub41c \ud615\ud0dc")),(0,l.kt)("h4",{id:"data"},"Data"),(0,l.kt)("p",null,"\ub370\uc774\ud130\ub294 \uc804 \ucc98\ub9ac\ub41c \ud53c\ucc98, \ubaa8\ub378\uc758 \uc608\uce21 \uac12 \ub4f1\uc744 \ud3ec\ud568\ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metric"},"Metric"),(0,l.kt)("p",null,"Metric\uc740 \ub3d9\uc801 \uc9c0\ud45c\uc640 \uc815\uc801 \uc9c0\ud45c \ub450 \uac00\uc9c0\ub85c \ub098\ub204\uc5c8\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"\ub3d9\uc801 \uc9c0\ud45c\ub780 train loss\uc640 \uac19\uc774 \ud559\uc2b5\uc774 \uc9c4\ud589\ub418\ub294 \uc911 \uc5d0\ud3ed(Epoch)\ub9c8\ub2e4 \uacc4\uc18d\ud574\uc11c \ubcc0\ud654\ud558\ub294 \uac12\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("li",{parentName:"ul"},"\uc815\uc801 \uc9c0\ud45c\ub780 \ud559\uc2b5\uc774 \ub05d\ub09c \ud6c4 \ucd5c\uc885\uc801\uc73c\ub85c \ubaa8\ub378\uc744 \ud3c9\uac00\ud558\ub294 \uc815\ud655\ub3c4 \ub4f1\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.")),(0,l.kt)("h2",{id:"pipeline"},"Pipeline"),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\ub97c 
\uc2e4\ud589\uc2dc\ud0a4\ub294 \uc21c\uc11c\ub3c4\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uc21c\uc11c\ub3c4\ub294 \ubc29\ud5a5 \uc21c\ud658\uc774 \uc5c6\ub294 \uadf8\ub798\ud504\ub85c \uc774\ub8e8\uc5b4\uc838 \uc788\uc73c\uba70, \uac04\ub2e8\ud55c \uc870\uac74\ubb38\uc744 \ud3ec\ud568\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-6.png",src:n(9429).Z,width:"1696",height:"746"})),(0,l.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,l.kt)("p",null,"\uc55e\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\uc2dc\ud0a4\uae30 \uc704\ud574\uc11c\ub294 Config\uac00 \ud544\uc694\ud558\ub2e4\uace0 \uc124\uba85\ud588\uc2b5\ub2c8\ub2e4. \ud30c\uc774\ud504\ub77c\uc778\uc744 \uad6c\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 Config \ub4e4\uc744 \ubaa8\uc544 \ub454 \uac83\uc774 \ud30c\uc774\ud504\ub77c\uc778 Config\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-7.png",src:n(4607).Z,width:"1810",height:"432"})),(0,l.kt)("h2",{id:"run"},"Run"),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \ud544\uc694\ub85c \ud558\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\uac00 \uc8fc\uc5b4\uc838\uc57c\uc9c0\ub9cc \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kubeflow\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc744 Run \uc774\ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-8.png",src:n(6818).Z,width:"1810",height:"576"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub418\uba74 \uac01 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\nKubeflow pipeline\uc5d0\uc11c\ub294 Run \ud558\ub098\ub2f9 \uace0\uc720\ud55c ID \ub97c \uc0dd\uc131\ud558\uace0, Run\uc5d0\uc11c \uc0dd\uc131\ub418\ub294 \ubaa8\ub4e0 \uc544\ud2f0\ud329\ud2b8\ub4e4\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"concept-9.png",src:n(7446).Z,width:"1810",height:"592"})),(0,l.kt)("p",null,"\uadf8\ub7ec\uba74 \uc774\uc81c \uc9c1\uc811 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},3396:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},8482:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},4582:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},3204:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},4175:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},7436:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},9429:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},4607:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},6818:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},7446:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file diff --git a/assets/js/b91e83a7.53320bf2.js 
b/assets/js/b91e83a7.ff8dee9e.js similarity index 99% rename from assets/js/b91e83a7.53320bf2.js rename to assets/js/b91e83a7.ff8dee9e.js index c345ec16..bfabbffe 100644 --- a/assets/js/b91e83a7.53320bf2.js +++ b/assets/js/b91e83a7.ff8dee9e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[560],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>_});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),d=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(l.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),u=d(n),m=r,_=u["".concat(l,".").concat(m)]||u[m]||c[m]||o;return n?a.createElement(_,p(p({ref:e},s),{},{components:n})):a.createElement(_,p({ref:e},s))}));function _(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=m;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[u]="string"==typeof t?t:r,p[1]=i;for(var d=2;d{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>d});var a=n(7462),r=(n(7294),n(3905));const o={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/how-to-debug",id:"version-1.0/kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/docs/1.0/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/how-to-debug.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/docs/1.0/kubeflow/advanced-mlflow"},next:{title:"1. 
What is API Deployment?",permalink:"/docs/1.0/api-deployment/what-is-api-deployment"}},l={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...o}=t;return(0,r.kt)(u,(0,a.Z)({},s,o,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 Kubeflow \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub514\ubc84\uae45\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"failed-component"},"Failed Component"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," \uc5d0\uc11c \uc774\uc6a9\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc870\uae08 \uc218\uc815\ud574\uc11c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud328\ud558\ub3c4\ub85d \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubcc0\uacbd\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, 
file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,r.kt)("p",null,"\uc218\uc815\ud55c \uc810\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc624\ub294 ",(0,r.kt)("inlineCode",{parentName:"li"},"load_iris_data")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," \ud53c\ucc98\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," \uac12\uc744 \uc8fc\uc785"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"drop_na()")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 na \uac12\uc774 \ud3ec\ud568\ub41c ",(0,r.kt)("inlineCode",{parentName:"li"},"row"),"\ub97c \uc81c\uac70")),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc2e4\ud589 \ud6c4 Run\uc744 \ub20c\ub7ec\uc11c \ud655\uc778\ud574\ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"Train from csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc2e4\ud328\ud588\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-0.png",src:n(4159).Z,width:"2826",height:"1790"})),(0,r.kt)("p",null,"\uc2e4\ud328\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uc11c \uc2e4\ud328\ud55c \uc774\uc720\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-2.png",src:n(5419).Z,width:"2826",height:"1796"})),(0,r.kt)("p",null,"\ub85c\uadf8\ub97c \ud655\uc778\ud558\uba74 \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uc774\uc5ec\uc11c \uc2e4\ud589\ub418\uc9c0 \uc54a\uc558\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ubd84\uba85 \uc815\uc0c1\uc801\uc73c\ub85c \ub370\uc774\ud130\ub97c \uc804\ub2ec\ud588\ub294\ub370 \uc65c \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uac1c\uc77c\uae4c\uc694? 
"),(0,r.kt)("p",null,"\uc774\uc81c \uc785\ub825\ubc1b\uc740 \ub370\uc774\ud130\uc5d0 \uc5b4\ub5a4 \ubb38\uc81c\uac00 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 Input/Ouput \ud0ed\uc5d0\uc11c \uc785\ub825\uac12\uc73c\ub85c \ub4e4\uc5b4\uac04 \ub370\uc774\ud130\ub4e4\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc6b4\ub85c\ub4dc\ub294 \ube68\uac04\uc0c9 \ub124\ubaa8\ub85c \ud45c\uc2dc\ub41c \uacf3\uc758 \ub9c1\ud06c\ub97c \ud074\ub9ad\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-5.png",src:n(2286).Z,width:"2690",height:"1740"})),(0,r.kt)("p",null,"\ub450 \uac1c\uc758 \ud30c\uc77c\uc744 \uac19\uc740 \uacbd\ub85c\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub9ac\uace0 \ud574\ub2f9 \uacbd\ub85c\ub85c \uc774\ub3d9\ud574\uc11c \ud30c\uc77c\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ud30c\uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,r.kt)("p",null,"\uc555\ucd95\uc744 \ud480\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 \uc774\ub97c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc744 \uc774\uc6a9\ud574 \ucef4\ud3ec\ub10c\ud2b8 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-3.png",src:n(4180).Z,width:"2434",height:"1690"})),(0,r.kt)("p",null,"\ub514\ubc84\uae45\uc744 \ud574\ubcf8 \uacb0\uacfc dropna \ud560 \ub54c column\uc744 \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc57c \ud558\ub294\ub370 row\ub97c \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc11c \ub370\uc774\ud130\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.\n\uc774\uc81c \ubb38\uc81c\uc758 \uc6d0\uc778\uc744 \uc54c\uc544\ub0c8\uc73c\ub2c8 column\uc744 \uae30\uc900\uc73c\ub85c drop\uc774 \ub418\uac8c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,r.kt)("p",null,"\uc218\uc815 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub2e4\uc2dc \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-6.png",src:n(6047).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},4159:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},5419:(t,e,n)=>{n.d(e,{Z:()=>a});const 
a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},4180:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},2286:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},6047:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[560],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>_});var a=n(7294);function r(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function p(t){for(var e=1;e=0||(r[n]=t[n]);return r}(t,e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(r[n]=t[n])}return r}var l=a.createContext({}),d=function(t){var e=a.useContext(l),n=e;return t&&(n="function"==typeof t?t(e):p(p({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(l.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,r=t.mdxType,o=t.originalType,l=t.parentName,s=i(t,["components","mdxType","originalType","parentName"]),u=d(n),m=r,_=u["".concat(l,".").concat(m)]||u[m]||c[m]||o;return n?a.createElement(_,p(p({ref:e},s),{},{components:n})):a.createElement(_,p({ref:e},s))}));function _(t,e){var n=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var o=n.length,p=new Array(o);p[0]=m;var i={};for(var l in e)hasOwnProperty.call(e,l)&&(i[l]=e[l]);i.originalType=t,i[u]="string"==typeof t?t:r,p[1]=i;for(var d=2;d{n.r(e),n.d(e,{assets:()=>l,contentTitle:()=>p,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>d});var a=n(7462),r=(n(7294),n(3905));const o={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},p=void 0,i={unversionedId:"kubeflow/how-to-debug",id:"version-1.0/kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/docs/1.0/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/how-to-debug.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/docs/1.0/kubeflow/advanced-mlflow"},next:{title:"1. 
What is API Deployment?",permalink:"/docs/1.0/api-deployment/what-is-api-deployment"}},l={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...o}=t;return(0,r.kt)(u,(0,a.Z)({},s,o,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 Kubeflow \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub514\ubc84\uae45\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubd05\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"failed-component"},"Failed Component"),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," \uc5d0\uc11c \uc774\uc6a9\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc870\uae08 \uc218\uc815\ud574\uc11c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud328\ud558\ub3c4\ub85d \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubcc0\uacbd\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, 
file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,r.kt)("p",null,"\uc218\uc815\ud55c \uc810\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"\ub370\uc774\ud130\ub97c \ubd88\ub7ec\uc624\ub294 ",(0,r.kt)("inlineCode",{parentName:"li"},"load_iris_data")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," \ud53c\ucc98\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"li"},"None")," \uac12\uc744 \uc8fc\uc785"),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"li"},"drop_na()")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 na \uac12\uc774 \ud3ec\ud568\ub41c ",(0,r.kt)("inlineCode",{parentName:"li"},"row"),"\ub97c \uc81c\uac70")),(0,r.kt)("p",null,"\uc774\uc81c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc2e4\ud589 \ud6c4 Run\uc744 \ub20c\ub7ec\uc11c \ud655\uc778\ud574\ubcf4\uba74 ",(0,r.kt)("inlineCode",{parentName:"p"},"Train from csv")," \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc2e4\ud328\ud588\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-0.png",src:n(4159).Z,width:"2826",height:"1790"})),(0,r.kt)("p",null,"\uc2e4\ud328\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uc11c \uc2e4\ud328\ud55c \uc774\uc720\ub97c \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-2.png",src:n(5419).Z,width:"2826",height:"1796"})),(0,r.kt)("p",null,"\ub85c\uadf8\ub97c \ud655\uc778\ud558\uba74 \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uc774\uc5ec\uc11c \uc2e4\ud589\ub418\uc9c0 \uc54a\uc558\ub2e4\uace0 \ub098\uc635\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ubd84\uba85 \uc815\uc0c1\uc801\uc73c\ub85c \ub370\uc774\ud130\ub97c \uc804\ub2ec\ud588\ub294\ub370 \uc65c \ub370\uc774\ud130\uc758 \uac1c\uc218\uac00 0\uac1c\uc77c\uae4c\uc694? 
"),(0,r.kt)("p",null,"\uc774\uc81c \uc785\ub825\ubc1b\uc740 \ub370\uc774\ud130\uc5d0 \uc5b4\ub5a4 \ubb38\uc81c\uac00 \uc788\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ud074\ub9ad\ud558\uace0 Input/Ouput \ud0ed\uc5d0\uc11c \uc785\ub825\uac12\uc73c\ub85c \ub4e4\uc5b4\uac04 \ub370\uc774\ud130\ub4e4\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc6b4\ub85c\ub4dc\ub294 \ube68\uac04\uc0c9 \ub124\ubaa8\ub85c \ud45c\uc2dc\ub41c \uacf3\uc758 \ub9c1\ud06c\ub97c \ud074\ub9ad\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-5.png",src:n(2286).Z,width:"2690",height:"1740"})),(0,r.kt)("p",null,"\ub450 \uac1c\uc758 \ud30c\uc77c\uc744 \uac19\uc740 \uacbd\ub85c\uc5d0 \ub2e4\uc6b4\ub85c\ub4dc\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uadf8\ub9ac\uace0 \ud574\ub2f9 \uacbd\ub85c\ub85c \uc774\ub3d9\ud574\uc11c \ud30c\uc77c\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ub450 \uac1c\uc758 \ud30c\uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,r.kt)("p",null,"\uc555\ucd95\uc744 \ud480\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 \uc774\ub97c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc744 \uc774\uc6a9\ud574 \ucef4\ud3ec\ub10c\ud2b8 \ucf54\ub4dc\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-3.png",src:n(4180).Z,width:"2434",height:"1690"})),(0,r.kt)("p",null,"\ub514\ubc84\uae45\uc744 \ud574\ubcf8 \uacb0\uacfc dropna \ud560 \ub54c column\uc744 \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc57c \ud558\ub294\ub370 row\ub97c \uae30\uc900\uc73c\ub85c drop\uc744 \ud574\uc11c \ub370\uc774\ud130\uac00 \ubaa8\ub450 \uc0ac\ub77c\uc84c\uc2b5\ub2c8\ub2e4.\n\uc774\uc81c \ubb38\uc81c\uc758 \uc6d0\uc778\uc744 \uc54c\uc544\ub0c8\uc73c\ub2c8 column\uc744 \uae30\uc900\uc73c\ub85c drop\uc774 \ub418\uac8c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,r.kt)("p",null,"\uc218\uc815 \ud6c4 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub2e4\uc2dc \uc5c5\ub85c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"debug-6.png",src:n(6047).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},4159:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},5419:(t,e,n)=>{n.d(e,{Z:()=>a});const 
a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},4180:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},2286:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},6047:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file diff --git a/assets/js/b93cd888.97e0c552.js b/assets/js/b93cd888.5e05879c.js similarity index 99% rename from assets/js/b93cd888.97e0c552.js rename to assets/js/b93cd888.5e05879c.js index 30673f73..838a8f7d 100644 --- a/assets/js/b93cd888.97e0c552.js +++ b/assets/js/b93cd888.5e05879c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[797],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=r,d=c["".concat(i,".").concat(k)]||c[k]||m[k]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=k;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:r,l[1]=s;for(var p=2;p{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/docs/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-prerequisite.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. 
Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/docs/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},p=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],u={toc:p},c="wrapper";function m(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\uc774 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc124\uce58\ud558\uae30\uc5d0 \uc55e\uc11c, ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \uc124\uce58 \ud639\uc740 \uc124\uc815\ud574\ub450\uc5b4\uc57c \ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc5d0 \ub300\ud55c \ub9e4\ub274\uc5bc\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"\ucd94\ud6c4 \ud074\ub77c\uc774\uc5b8\ud2b8\uc640 \ud074\ub7ec\uc2a4\ud130\uc758 \uc6d0\ud65c\ud55c \ud1b5\uc2e0\uc744 \uc704\ud574\uc11c\ub294 Port-Forwarding\uc744 \uc218\ud589\ud574\uc57c \ud560 \uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4.\nPort-Forwarding\uc744 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0 \ub2e4\uc74c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4 \uc124\uce58\uc5d0 \ud544\uc694\ud55c APT \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uc758 \uacf5\uc2dd GPG key\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"apt \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc800\ub85c \ub3c4\ucee4\ub97c \uc124\uce58\ud560 \ub54c, stable Repository\uc5d0\uc11c \ubc1b\uc544\uc624\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \uc124\uce58\ud560 \uc218 \uc788\ub294 \ub3c4\ucee4 \ubc84\uc804\uc744 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"\ucd9c\ub825\ub418\ub294 \ubc84\uc804 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc774 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\uac00 \ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc758 \ub3c4\ucee4\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. 
The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"docker \uad00\ub828 command\ub97c sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud558\ub3c4\ub85d \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uad8c\ud55c\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 docker command\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ub41c \uac83\uc744 \ud655\uc778\ud558\uae30 \uc704\ud574, \ub2e4\uc2dc \ud55c\ubc88 docker run\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uad8c\ud55c\uc774 \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"kubelet \uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uac8c \ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uc5d0\uc11c swap\uc774\ub77c\uace0 \ubd88\ub9ac\ub294 \uac00\uc0c1\uba54\ubaa8\ub9ac\ub97c \uaebc \ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. 
\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 swap\uc744 \uaebc \ub461\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(\ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8\ub97c \uac19\uc740 \ub370\uc2a4\ud06c\ud1b1\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ub54c swap \uba54\ubaa8\ub9ac\ub97c \uc885\ub8cc\ud558\uba74 \uc18d\ub3c4\uc758 \uc800\ud558\uac00 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4)")," "),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl \uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 API\ub97c \uc694\uccad\ud560 \ub54c \uc0ac\uc6a9\ud558\ub294 \ud074\ub77c\uc774\uc5b8\ud2b8 \ud234\uc785\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0 \uc124\uce58\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kubectl v1.21.7 \ubc84\uc804\uc744 \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kubectl \uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \ud30c\uc77c\uc758 \uad8c\ud55c\uacfc \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc5ec\ub7ec \uac1c\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, \uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c\uc744 \uad00\ub9ac\ud574\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c \ud639\uc740 \uc5ec\ub7ec \uac1c\uc758 kube-context\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ubc29\ubc95\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \ubb38\uc11c\ub97c \ucc38\uace0\ud558\uc2dc\uae30 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"https://github.com/ahmetb/kubectx"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/docs/tasks/tools/install-kubectl-linux/"},"\ub9ac\ub205\uc2a4\uc5d0 kubectl \uc124\uce58 \ubc0f \uc124\uc815"))))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[797],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>d});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),p=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(a),k=r,d=c["".concat(i,".").concat(k)]||c[k]||m[k]||o;return a?n.createElement(d,l(l({ref:t},u),{},{components:a})):n.createElement(d,l({ref:t},u))}));function d(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=k;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:r,l[1]=s;for(var p=2;p{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>m,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/docs/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/docs/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-prerequisite.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. 
Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/docs/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},p=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],u={toc:p},c="wrapper";function m(e){let{components:t,...a}=e;return(0,r.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\uc774 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc124\uce58\ud558\uae30\uc5d0 \uc55e\uc11c, ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \uc124\uce58 \ud639\uc740 \uc124\uc815\ud574\ub450\uc5b4\uc57c \ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc5d0 \ub300\ud55c \ub9e4\ub274\uc5bc\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"\ucd94\ud6c4 \ud074\ub77c\uc774\uc5b8\ud2b8\uc640 \ud074\ub7ec\uc2a4\ud130\uc758 \uc6d0\ud65c\ud55c \ud1b5\uc2e0\uc744 \uc704\ud574\uc11c\ub294 Port-Forwarding\uc744 \uc218\ud589\ud574\uc57c \ud560 \uc77c\uc774 \uc788\uc2b5\ub2c8\ub2e4.\nPort-Forwarding\uc744 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0 \ub2e4\uc74c \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4 \uc124\uce58\uc5d0 \ud544\uc694\ud55c APT \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uc758 \uacf5\uc2dd GPG key\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"apt \ud328\ud0a4\uc9c0 \ub9e4\ub2c8\uc800\ub85c \ub3c4\ucee4\ub97c \uc124\uce58\ud560 \ub54c, stable Repository\uc5d0\uc11c \ubc1b\uc544\uc624\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \uc124\uce58\ud560 \uc218 \uc788\ub294 \ub3c4\ucee4 \ubc84\uc804\uc744 
\ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"\ucd9c\ub825\ub418\ub294 \ubc84\uc804 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc774 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\uac00 \ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," \ubc84\uc804\uc758 \ub3c4\ucee4\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub3c4\ucee4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. 
The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"docker \uad00\ub828 command\ub97c sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud558\ub3c4\ub85d \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uad8c\ud55c\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"sudo \ud0a4\uc6cc\ub4dc \uc5c6\uc774 docker command\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ub41c \uac83\uc744 \ud655\uc778\ud558\uae30 \uc704\ud574, \ub2e4\uc2dc \ud55c\ubc88 docker run\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"\uba85\ub839\uc5b4 \uc2e4\ud589 \ud6c4 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uad8c\ud55c\uc774 \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"kubelet \uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uac8c \ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uc5d0\uc11c swap\uc774\ub77c\uace0 \ubd88\ub9ac\ub294 \uac00\uc0c1\uba54\ubaa8\ub9ac\ub97c \uaebc \ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. 
\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 swap\uc744 \uaebc \ub461\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(\ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8\ub97c \uac19\uc740 \ub370\uc2a4\ud06c\ud1b1\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ub54c swap \uba54\ubaa8\ub9ac\ub97c \uc885\ub8cc\ud558\uba74 \uc18d\ub3c4\uc758 \uc800\ud558\uac00 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4)")," "),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl \uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 API\ub97c \uc694\uccad\ud560 \ub54c \uc0ac\uc6a9\ud558\ub294 \ud074\ub77c\uc774\uc5b8\ud2b8 \ud234\uc785\ub2c8\ub2e4. ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0 \uc124\uce58\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud604\uc7ac \ud3f4\ub354\uc5d0 kubectl v1.21.7 \ubc84\uc804\uc744 \ub2e4\uc6b4\ubc1b\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"kubectl \uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \ud30c\uc77c\uc758 \uad8c\ud55c\uacfc \uc704\uce58\ub97c \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc5ec\ub7ec \uac1c\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0, \uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c\uc744 \uad00\ub9ac\ud574\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc5ec\ub7ec \uac1c\uc758 kubeconfig \ud30c\uc77c \ud639\uc740 \uc5ec\ub7ec \uac1c\uc758 kube-context\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ubc29\ubc95\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \ubb38\uc11c\ub97c \ucc38\uace0\ud558\uc2dc\uae30 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"https://github.com/ahmetb/kubectx"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/docs/tasks/tools/install-kubectl-linux/"},"\ub9ac\ub205\uc2a4\uc5d0 kubectl \uc124\uce58 \ubc0f \uc124\uc815"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/bb396da7.fad17c4c.js b/assets/js/bb396da7.357976ee.js similarity index 97% rename from assets/js/bb396da7.fad17c4c.js rename to assets/js/bb396da7.357976ee.js index 79212985..daf4518c 100644 --- a/assets/js/bb396da7.fad17c4c.js +++ b/assets/js/bb396da7.357976ee.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5642],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),d=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=d(e.components);return o.createElement(l.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=d(r),b=n,f=u["".concat(l,".").concat(b)]||u[b]||c[b]||a;return r?o.createElement(f,s(s({ref:t},p),{},{components:r})):o.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[u]="string"==typeof e?e:n,s[1]=i;for(var d=2;d{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"kubeflow-dashboard-guide/tensorboards",title:"3. 
Tensorboards",description:"",source:"@site/docs/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes"}},l={},d=[],p={toc:d},u="wrapper";function c(e){let{components:t,...a}=e;return(0,n.kt)(u,(0,o.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Tensorboards\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1076).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"Tensorboards \ud0ed\uc740 Tensorflow, PyTorch \ub4f1\uc758 \ud504\ub808\uc784\uc6cc\ud06c\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Tensorboard \uc720\ud2f8\uc774 \uc0dd\uc131\ud55c ML \ud559\uc2b5 \uad00\ub828 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84(Tensorboard Server)\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc0dd\uc131\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc774\ub807\uac8c \uc0dd\uc131\ud55c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\ub294, \uc77c\ubc18\uc801\uc778 \uc6d0\uaca9 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc758 \uc0ac\uc6a9\ubc95\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc73c\uba70, ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc5d0\uc11c \ubc14\ub85c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\ub294 \uc6a9\ub3c4"),"\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc758 \uacb0\uacfc\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ubc29\ubc95\uc5d0\ub294 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/"},"\ub2e4\uc591\ud55c \ubc29\uc2dd"),"\uc774 \uc788\uc73c\uba70, ",(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub354 \uc77c\ubc18\uc801\uc73c\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d Kubeflow \ucef4\ud3ec\ub10c\ud2b8\uc758 Visualization \uae30\ub2a5\uacfc MLflow\uc758 \uc2dc\uac01\ud654 \uae30\ub2a5\uc744 \ud65c\uc6a9\ud560 \uc608\uc815\uc774\ubbc0\ub85c, Tensorboards \ud398\uc774\uc9c0\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 \uc0dd\ub7b5\ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0},7511:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1076:(e,t,r)=>{r.d(t,{Z:()=>o});const 
o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5642],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),d=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=d(e.components);return o.createElement(l.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=d(r),b=n,f=u["".concat(l,".").concat(b)]||u[b]||c[b]||a;return r?o.createElement(f,s(s({ref:t},p),{},{components:r})):o.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[u]="string"==typeof e?e:n,s[1]=i;for(var d=2;d{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>a,metadata:()=>i,toc:()=>d});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"kubeflow-dashboard-guide/tensorboards",title:"3. Tensorboards",description:"",source:"@site/docs/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/docs/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks"},next:{title:"4. 
Volumes",permalink:"/docs/kubeflow-dashboard-guide/volumes"}},l={},d=[],p={toc:d},u="wrapper";function c(e){let{components:t,...a}=e;return(0,n.kt)(u,(0,o.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Tensorboards\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1076).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"Tensorboards \ud0ed\uc740 Tensorflow, PyTorch \ub4f1\uc758 \ud504\ub808\uc784\uc6cc\ud06c\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Tensorboard \uc720\ud2f8\uc774 \uc0dd\uc131\ud55c ML \ud559\uc2b5 \uad00\ub828 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84(Tensorboard Server)\ub97c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc0dd\uc131\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uc774\ub807\uac8c \uc0dd\uc131\ud55c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\ub294, \uc77c\ubc18\uc801\uc778 \uc6d0\uaca9 \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc758 \uc0ac\uc6a9\ubc95\uacfc \uac19\uc774 \uc0ac\uc6a9\ud560 \uc218\ub3c4 \uc788\uc73c\uba70, ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc5d0\uc11c \ubc14\ub85c \ud150\uc11c\ubcf4\ub4dc \uc11c\ubc84\uc5d0 \ub370\uc774\ud130\ub97c \uc800\uc7a5\ud558\ub294 \uc6a9\ub3c4"),"\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,n.kt)("p",null,"Kubeflow \ud30c\uc774\ud504\ub77c\uc778 \ub7f0\uc758 \uacb0\uacfc\ub97c \uc2dc\uac01\ud654\ud558\ub294 \ubc29\ubc95\uc5d0\ub294 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/"},"\ub2e4\uc591\ud55c \ubc29\uc2dd"),"\uc774 \uc788\uc73c\uba70, ",(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub354 \uc77c\ubc18\uc801\uc73c\ub85c \ud65c\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d Kubeflow \ucef4\ud3ec\ub10c\ud2b8\uc758 Visualization \uae30\ub2a5\uacfc MLflow\uc758 \uc2dc\uac01\ud654 \uae30\ub2a5\uc744 \ud65c\uc6a9\ud560 \uc608\uc815\uc774\ubbc0\ub85c, Tensorboards \ud398\uc774\uc9c0\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \uc124\uba85\uc740 \uc0dd\ub7b5\ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0},7511:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1076:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file diff --git a/assets/js/be2f486c.a11ca60b.js b/assets/js/be2f486c.1f45d851.js similarity index 99% rename from assets/js/be2f486c.a11ca60b.js rename to assets/js/be2f486c.1f45d851.js index 5207a3fd..6b505424 100644 --- a/assets/js/be2f486c.a11ca60b.js +++ b/assets/js/be2f486c.1f45d851.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9398],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=p(t),u=l,g=m["".concat(i,".").concat(u)]||m[u]||c[u]||r;return t?a.createElement(g,s(s({ref:n},d),{},{components:t})):a.createElement(g,s({ref:n},d))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,s=new Array(r);s[0]=u;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},s=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/docs/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/docs/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-iris.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/docs/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/docs/api-deployment/seldon-pg"}},i={},p=[{value:"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30",id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:3},{value:"2. \uc2a4\ud399 \uc815\uc758",id:"2-\uc2a4\ud399-\uc815\uc758",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Swagger \uc811\uc18d",id:"1-swagger-\uc811\uc18d",level:3},{value:"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd",id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd",level:3},{value:"3. Try it out \uc120\ud0dd",id:"3-try-it-out-\uc120\ud0dd",level:3},{value:"4. Request body\uc5d0 data \uc785\ub825",id:"4-request-body\uc5d0-data-\uc785\ub825",level:3},{value:"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778",id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778",level:3},{value:"Using CLI",id:"using-cli",level:2}],d={toc:p},m="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(m,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30"},"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30"),(0,l.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\uc744 \ub54c SeldonDeployment\ub97c \ud1b5\ud574 API Deployment\ub97c \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\nSeldonDeployment\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\uc5d0 \ubaa8\ub378\uc744 REST/gRPC \uc11c\ubc84\uc758 \ud615\ud0dc\ub85c \ubc30\ud3ec\ud558\uae30 \uc704\ud574 \uc815\uc758\ub41c CRD(CustomResourceDefinition)\uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"SeldonDeployment \uad00\ub828\ub41c \uc2e4\uc2b5\uc740 seldon-deploy\ub77c\ub294 \uc0c8\ub85c\uc6b4 \ub124\uc784\uc2a4\ud398\uc774\uc2a4(namespace)\uc5d0\uc11c \uc9c4\ud589\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub97c \uc0dd\uc131\ud55c \ub4a4, seldon-deploy\ub97c \ud604\uc7ac \ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-\uc2a4\ud399-\uc815\uc758"},"2. \uc2a4\ud399 \uc815\uc758"),(0,l.kt)("p",null,"SeldonDeployment\ub97c \ubc30\ud3ec\ud558\uae30 \uc704\ud55c yaml \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uacf5\uac1c\ub41c iris model\uc744 \uc0ac\uc6a9\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\uc774 iris model\uc740 sklearn \ud504\ub808\uc784\uc6cc\ud06c\ub97c \ud1b5\ud574 \ud559\uc2b5\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 SKLEARN_SERVER\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"yaml \ud30c\uc77c\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uac00 \ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"\uc774\uc81c \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ucd94\ub860 \uc694\uccad(predict request)\ub97c 
\ubcf4\ub0b4\uc11c \ucd94\ub860 \uacb0\uad0f\uac12\uc744 \ubc1b\uc544\uc635\ub2c8\ub2e4.\n\ubc30\ud3ec\ub41c API\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uaddc\uce59\uc73c\ub85c \uc0dd\uc131\ub429\ub2c8\ub2e4.\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-seldon"},"Seldon Core \uc124\uce58 \uc2dc, Ambassador\ub97c Ingress Controller\ub85c \uc124\uc815\ud558\uc600\uc73c\ubbc0\ub85c"),", SeldonDeployment\ub85c \uc0dd\uc131\ub41c API \uc11c\ubc84\ub294 \ubaa8\ub450 Ambassador\uc758 Ingress gateway\ub97c \ud1b5\ud574 \uc694\uccad\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub530\ub77c\uc11c \uc6b0\uc120 Ambassador Ingress Gateway\uc758 url\uc744 \ud658\uacbd \ubcc0\uc218\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"\uc124\uc815\ub41c url\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uc5b4\uc57c \ud558\uba70, \ud074\ub77c\uc6b0\ub4dc \ub4f1\uc744 \ud1b5\ud574 \uc124\uc815\ud560 \uacbd\uc6b0, internal ip \uc8fc\uc18c\uac00 \uc124\uc815\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"SeldonDeployment\uac00 \ubc30\ud3ec\ub41c ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\uc640 ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\n\uc774\ub294 \uc2a4\ud399\uc744 \uc815\uc758\ud560 \ub54c metadata\uc5d0 \uc815\uc758\ub41c \uac12\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\ub294 seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\uc740 sklearn \uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"SeldonDeployment\uc5d0\uc11c \uc8fc\ub85c \uc0ac\uc6a9\ud558\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name"),"\uc740 \ub450 \uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"\uac01\uac01\uc758 method\uc758 \uc790\uc138\ud55c \uc0ac\uc6a9 \ubc29\ubc95\uc740 \uc544\ub798\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"\uc6b0\uc120 doc method\ub97c \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc785\ub2c8\ub2e4. 
doc method\ub97c \uc774\uc6a9\ud558\uba74 seldon\uc5d0\uc11c \uc0dd\uc131\ud55c swagger\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-swagger-\uc811\uc18d"},"1. Swagger \uc811\uc18d"),(0,l.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c ingress url \uaddc\uce59\uc5d0 \ub530\ub77c \uc544\ub798 \uc8fc\uc18c\ub97c \ud1b5\ud574 swagger\uc5d0 \uc811\uadfc\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(860).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd"},"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd"),(0,l.kt)("p",null,"UI\uc5d0\uc11c ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," \uba54\ub274\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(4835).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-try-it-out-\uc120\ud0dd"},"3. ",(0,l.kt)("em",{parentName:"h3"},"Try it out")," \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(3729).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-request-body\uc5d0-data-\uc785\ub825"},"4. Request body\uc5d0 data \uc785\ub825"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(2821).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"\ub2e4\uc74c \ub370\uc774\ud130\ub97c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778"},"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," \ubc84\ud2bc\uc744 \ub20c\ub7ec\uc11c \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(1150).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\ub860 \uacb0\uacfc\ub97c \uc5bb\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"\ub610\ud55c, curl\uacfc \uac19\uc740 http client CLI \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc11c\ub3c4 API \uc694\uccad\uc744 \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions"),"\ub97c \uc694\uccad\ud558\uba74"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \uc751\ub2f5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}c.isMDXComponent=!0},860:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},4835:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},3729:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},2821:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},1150:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9398],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function s(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),p=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=p(e.components);return 
a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=p(t),u=l,g=m["".concat(i,".").concat(u)]||m[u]||c[u]||r;return t?a.createElement(g,s(s({ref:n},d),{},{components:t})):a.createElement(g,s({ref:n},d))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,s=new Array(r);s[0]=u;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},s=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/docs/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/docs/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-iris.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/docs/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/docs/api-deployment/seldon-pg"}},i={},p=[{value:"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30",id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:3},{value:"2. \uc2a4\ud399 \uc815\uc758",id:"2-\uc2a4\ud399-\uc815\uc758",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Swagger \uc811\uc18d",id:"1-swagger-\uc811\uc18d",level:3},{value:"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd",id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd",level:3},{value:"3. Try it out \uc120\ud0dd",id:"3-try-it-out-\uc120\ud0dd",level:3},{value:"4. Request body\uc5d0 data \uc785\ub825",id:"4-request-body\uc5d0-data-\uc785\ub825",level:3},{value:"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778",id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778",level:3},{value:"Using CLI",id:"using-cli",level:2}],d={toc:p},m="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(m,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"seldondeployment\ub97c-\ud1b5\ud574-\ubc30\ud3ec\ud558\uae30"},"SeldonDeployment\ub97c \ud1b5\ud574 \ubc30\ud3ec\ud558\uae30"),(0,l.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\uc744 \ub54c SeldonDeployment\ub97c \ud1b5\ud574 API Deployment\ub97c \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\nSeldonDeployment\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\uc5d0 \ubaa8\ub378\uc744 REST/gRPC \uc11c\ubc84\uc758 \ud615\ud0dc\ub85c \ubc30\ud3ec\ud558\uae30 \uc704\ud574 \uc815\uc758\ub41c CRD(CustomResourceDefinition)\uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"SeldonDeployment \uad00\ub828\ub41c \uc2e4\uc2b5\uc740 seldon-deploy\ub77c\ub294 \uc0c8\ub85c\uc6b4 \ub124\uc784\uc2a4\ud398\uc774\uc2a4(namespace)\uc5d0\uc11c \uc9c4\ud589\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub97c \uc0dd\uc131\ud55c \ub4a4, seldon-deploy\ub97c \ud604\uc7ac \ub124\uc784\uc2a4\ud398\uc774\uc2a4\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-\uc2a4\ud399-\uc815\uc758"},"2. \uc2a4\ud399 \uc815\uc758"),(0,l.kt)("p",null,"SeldonDeployment\ub97c \ubc30\ud3ec\ud558\uae30 \uc704\ud55c yaml \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uacf5\uac1c\ub41c iris model\uc744 \uc0ac\uc6a9\ud558\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.\n\uc774 iris model\uc740 sklearn \ud504\ub808\uc784\uc6cc\ud06c\ub97c \ud1b5\ud574 \ud559\uc2b5\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 SKLEARN_SERVER\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"yaml \ud30c\uc77c\uc744 \ubc30\ud3ec\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\uac00 \ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"\uc774\uc81c \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ucd94\ub860 \uc694\uccad(predict request)\ub97c 
\ubcf4\ub0b4\uc11c \ucd94\ub860 \uacb0\uad0f\uac12\uc744 \ubc1b\uc544\uc635\ub2c8\ub2e4.\n\ubc30\ud3ec\ub41c API\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uaddc\uce59\uc73c\ub85c \uc0dd\uc131\ub429\ub2c8\ub2e4.\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-seldon"},"Seldon Core \uc124\uce58 \uc2dc, Ambassador\ub97c Ingress Controller\ub85c \uc124\uc815\ud558\uc600\uc73c\ubbc0\ub85c"),", SeldonDeployment\ub85c \uc0dd\uc131\ub41c API \uc11c\ubc84\ub294 \ubaa8\ub450 Ambassador\uc758 Ingress gateway\ub97c \ud1b5\ud574 \uc694\uccad\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub530\ub77c\uc11c \uc6b0\uc120 Ambassador Ingress Gateway\uc758 url\uc744 \ud658\uacbd \ubcc0\uc218\ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"\uc124\uc815\ub41c url\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uc5b4\uc57c \ud558\uba70, \ud074\ub77c\uc6b0\ub4dc \ub4f1\uc744 \ud1b5\ud574 \uc124\uc815\ud560 \uacbd\uc6b0, internal ip \uc8fc\uc18c\uac00 \uc124\uc815\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"SeldonDeployment\uac00 \ubc30\ud3ec\ub41c ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\uc640 ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\n\uc774\ub294 \uc2a4\ud399\uc744 \uc815\uc758\ud560 \ub54c metadata\uc5d0 \uc815\uc758\ub41c \uac12\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace"),"\ub294 seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name"),"\uc740 sklearn \uc785\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"SeldonDeployment\uc5d0\uc11c \uc8fc\ub85c \uc0ac\uc6a9\ud558\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name"),"\uc740 \ub450 \uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"\uac01\uac01\uc758 method\uc758 \uc790\uc138\ud55c \uc0ac\uc6a9 \ubc29\ubc95\uc740 \uc544\ub798\uc5d0\uc11c \uc124\uba85\ud569\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"\uc6b0\uc120 doc method\ub97c \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc785\ub2c8\ub2e4. 
doc method\ub97c \uc774\uc6a9\ud558\uba74 seldon\uc5d0\uc11c \uc0dd\uc131\ud55c swagger\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-swagger-\uc811\uc18d"},"1. Swagger \uc811\uc18d"),(0,l.kt)("p",null,"\uc704\uc5d0\uc11c \uc124\uba85\ud55c ingress url \uaddc\uce59\uc5d0 \ub530\ub77c \uc544\ub798 \uc8fc\uc18c\ub97c \ud1b5\ud574 swagger\uc5d0 \uc811\uadfc\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(860).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-swagger-predictions-\uba54\ub274-\uc120\ud0dd"},"2. Swagger Predictions \uba54\ub274 \uc120\ud0dd"),(0,l.kt)("p",null,"UI\uc5d0\uc11c ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," \uba54\ub274\ub97c \uc120\ud0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(4835).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-try-it-out-\uc120\ud0dd"},"3. ",(0,l.kt)("em",{parentName:"h3"},"Try it out")," \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(3729).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-request-body\uc5d0-data-\uc785\ub825"},"4. Request body\uc5d0 data \uc785\ub825"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(2821).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"\ub2e4\uc74c \ub370\uc774\ud130\ub97c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-\ucd94\ub860-\uacb0\uacfc-\ud655\uc778"},"5. 
\ucd94\ub860 \uacb0\uacfc \ud655\uc778"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," \ubc84\ud2bc\uc744 \ub20c\ub7ec\uc11c \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(1150).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ucd94\ub860 \uacb0\uacfc\ub97c \uc5bb\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"\ub610\ud55c, curl\uacfc \uac19\uc740 http client CLI \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc11c\ub3c4 API \uc694\uccad\uc744 \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\uc74c\uacfc \uac19\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions"),"\ub97c \uc694\uccad\ud558\uba74"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"\uc544\ub798\uc640 \uac19\uc740 \uc751\ub2f5\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}c.isMDXComponent=!0},860:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},4835:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},3729:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},2821:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},1150:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file diff --git a/assets/js/bf383222.8ae7415e.js b/assets/js/bf383222.e7c7553b.js similarity index 98% rename from assets/js/bf383222.8ae7415e.js rename to assets/js/bf383222.e7c7553b.js index 51215863..3fef98e0 100644 --- a/assets/js/bf383222.8ae7415e.js +++ b/assets/js/bf383222.e7c7553b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6661],{3905:(e,t,r)=>{r.d(t,{Zo:()=>l,kt:()=>k});var n=r(7294);function s(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function u(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(s[r]=e[r]);return s}(e,t);if(Object.getOwnPropertySymbols){var 
u=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(s[r]=e[r])}return s}var p=n.createContext({}),i=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},l=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",b={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var r=e.components,s=e.mdxType,u=e.originalType,p=e.parentName,l=a(e,["components","mdxType","originalType","parentName"]),c=i(r),d=s,k=c["".concat(p,".").concat(d)]||c[d]||b[d]||u;return r?n.createElement(k,o(o({ref:t},l),{},{components:r})):n.createElement(k,o({ref:t},l))}));function k(e,t){var r=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var u=r.length,o=new Array(u);o[0]=d;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[c]="string"==typeof e?e:s,o[1]=a;for(var i=2;i{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>b,frontMatter:()=>u,metadata:()=>a,toc:()=>i});var n=r(7462),s=(r(7294),r(3905));const u={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,a={unversionedId:"setup-kubernetes/kubernetes",id:"version-1.0/setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/docs/1.0/setup-kubernetes/intro"},next:{title:"3. 
Install Prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite"}},p={},i=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],l={toc:i},c="wrapper";function b(e){let{components:t,...r}=e;return(0,s.kt)(c,(0,n.Z)({},l,r,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \ucc98\uc74c \ubc30\uc6b0\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c \uccab \uc9c4\uc785 \uc7a5\ubcbd\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc2e4\uc2b5 \ud658\uacbd\uc744 \uad6c\ucd95\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,s.kt)("p",null,"\ud504\ub85c\ub355\uc158 \ub808\ubca8\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \uc218 \uc788\uac8c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\ub294 \ub3c4\uad6c\ub294 kubeadm \uc774\uc9c0\ub9cc, \uc0ac\uc6a9\uc790\ub4e4\uc774 \uc870\uae08 \ub354 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 kubespray, kops \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud558\uba70, \ud559\uc2b5 \ubaa9\uc801\uc744 \uc704\ud574\uc11c \ucef4\ud329\ud2b8\ud55c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc815\ub9d0 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 k3s, minikube, microk8s, kind \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,s.kt)("p",null,"\uac01\uac01\uc758 \ub3c4\uad6c\ub294 \uc7a5\ub2e8\uc810\uc774 \ub2e4\ub974\uae30\uc5d0 \uc0ac\uc6a9\uc790\ub9c8\ub2e4 \uc120\ud638\ud558\ub294 \ub3c4\uad6c\uac00 \ub2e4\ub978 \uc810\uc744 \uace0\ub824\ud558\uc5ec, \ubcf8 \uae00\uc5d0\uc11c\ub294 kubeadm, k3s, minikube\uc758 3\uac00\uc9c0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.\n\uac01 \ub3c4\uad6c\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ube44\uad50\ub294 \ub2e4\uc74c \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"\uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \ud234\uc740 ",(0,s.kt)("strong",{parentName:"p"},"k3s"),"\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c \uc27d\uac8c \ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.",(0,s.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \ub178\ub4dc \uad6c\uc131\uae4c\uc9c0 \ud65c\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),"\uc744 \uad8c\uc7a5\ud574 \ub4dc\ub9bd\ub2c8\ub2e4.",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," \ub294 \uc800\ud76c\uac00 \uc124\uba85\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \uc678\uc5d0\ub3c4 \ub2e4\ub978 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c add-on \ud615\uc2dd\uc73c\ub85c \uc27d\uac8c \uc124\uce58\ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("p",null,"\ubcf8 ",(0,s.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uad6c\ucd95\ud558\uac8c \ub420 MLOps \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 \uc6d0\ud65c\ud788 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, 
\uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c, \ucd94\uac00\ub85c \uc124\uc815\ud574 \uc8fc\uc5b4\uc57c \ud558\ub294 \ubd80\ubd84\uc774 \ucd94\uac00\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("p",null,"Ubuntu OS\uae4c\uc9c0\ub294 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud0d1\uc744 k8s cluster\ub85c \uad6c\ucd95\ud55c \ub4a4, \uc678\ubd80 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud558\ub294 \uac83\uae4c\uc9c0\uac00 \ubcf8 ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes"),"\ub2e8\uc6d0\uc758 \ubc94\uc704\uc785\ub2c8\ub2e4."),(0,s.kt)("p",null,"\uc790\uc138\ud55c \uad6c\ucd95 \ubc29\ubc95\uc740 3\uac00\uc9c0 \ub3c4\uad6c\ub9c8\ub2e4 \ub2e4\ub974\uae30\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc740 \ud750\ub984\uc73c\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \ubc18\ub4dc\uc2dc \ubaa8\ub4e0 \ub3c4\uad6c\ub97c \uc0ac\uc6a9\ud574 \ubcfc \ud544\uc694\ub294 \uc5c6\uc73c\uba70, \uc774 \uc911 \uc5ec\ub7ec\ubd84\uc774 \uc775\uc219\ud558\uc2e0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc8fc\uc2dc\uba74 \ucda9\ubd84\ud569\ub2c8\ub2e4."))}b.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6661],{3905:(e,t,r)=>{r.d(t,{Zo:()=>l,kt:()=>k});var n=r(7294);function s(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function u(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(s[r]=e[r]);return s}(e,t);if(Object.getOwnPropertySymbols){var u=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(s[r]=e[r])}return s}var p=n.createContext({}),i=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},l=function(e){var t=i(e.components);return n.createElement(p.Provider,{value:t},e.children)},c="mdxType",b={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var r=e.components,s=e.mdxType,u=e.originalType,p=e.parentName,l=a(e,["components","mdxType","originalType","parentName"]),c=i(r),d=s,k=c["".concat(p,".").concat(d)]||c[d]||b[d]||u;return r?n.createElement(k,o(o({ref:t},l),{},{components:r})):n.createElement(k,o({ref:t},l))}));function k(e,t){var r=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var u=r.length,o=new Array(u);o[0]=d;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[c]="string"==typeof e?e:s,o[1]=a;for(var i=2;i{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>b,frontMatter:()=>u,metadata:()=>a,toc:()=>i});var n=r(7462),s=(r(7294),r(3905));const u={title:"2. 
Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,a={unversionedId:"setup-kubernetes/kubernetes",id:"version-1.0/setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/docs/1.0/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/docs/1.0/setup-kubernetes/intro"},next:{title:"3. Install Prerequisite",permalink:"/docs/1.0/setup-kubernetes/install-prerequisite"}},p={},i=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],l={toc:i},c="wrapper";function b(e){let{components:t,...r}=e;return(0,s.kt)(c,(0,n.Z)({},l,r,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \ucc98\uc74c \ubc30\uc6b0\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c \uccab \uc9c4\uc785 \uc7a5\ubcbd\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc2e4\uc2b5 \ud658\uacbd\uc744 \uad6c\ucd95\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,s.kt)("p",null,"\ud504\ub85c\ub355\uc158 \ub808\ubca8\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \uc218 \uc788\uac8c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\ub294 \ub3c4\uad6c\ub294 kubeadm \uc774\uc9c0\ub9cc, \uc0ac\uc6a9\uc790\ub4e4\uc774 \uc870\uae08 \ub354 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 kubespray, kops \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud558\uba70, \ud559\uc2b5 \ubaa9\uc801\uc744 \uc704\ud574\uc11c \ucef4\ud329\ud2b8\ud55c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uc815\ub9d0 \uc27d\uac8c \uad6c\ucd95\ud560 \uc218 \uc788\ub3c4\ub85d \ub3c4\uc640\uc8fc\ub294 k3s, minikube, microk8s, kind \ub4f1\uc758 \ub3c4\uad6c\ub3c4 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,s.kt)("p",null,"\uac01\uac01\uc758 \ub3c4\uad6c\ub294 \uc7a5\ub2e8\uc810\uc774 \ub2e4\ub974\uae30\uc5d0 \uc0ac\uc6a9\uc790\ub9c8\ub2e4 \uc120\ud638\ud558\ub294 \ub3c4\uad6c\uac00 \ub2e4\ub978 \uc810\uc744 \uace0\ub824\ud558\uc5ec, \ubcf8 \uae00\uc5d0\uc11c\ub294 kubeadm, k3s, minikube\uc758 3\uac00\uc9c0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.\n\uac01 \ub3c4\uad6c\uc5d0 \ub300\ud55c \uc790\uc138\ud55c \ube44\uad50\ub294 \ub2e4\uc74c \ucfe0\ubc84\ub124\ud2f0\uc2a4 ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"\uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uad8c\uc7a5\ud558\ub294 \ud234\uc740 
",(0,s.kt)("strong",{parentName:"p"},"k3s"),"\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c \uc27d\uac8c \ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.",(0,s.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \ub178\ub4dc \uad6c\uc131\uae4c\uc9c0 \ud65c\uc6a9\ud558\uace0 \uc2f6\ub2e4\uba74 ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),"\uc744 \uad8c\uc7a5\ud574 \ub4dc\ub9bd\ub2c8\ub2e4.",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," \ub294 \uc800\ud76c\uac00 \uc124\uba85\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \uc678\uc5d0\ub3c4 \ub2e4\ub978 \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c add-on \ud615\uc2dd\uc73c\ub85c \uc27d\uac8c \uc124\uce58\ud560 \uc218 \uc788\ub2e4\ub294 \uc7a5\uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("p",null,"\ubcf8 ",(0,s.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uad6c\ucd95\ud558\uac8c \ub420 MLOps \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 \uc6d0\ud65c\ud788 \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud560 \ub54c, \ucd94\uac00\ub85c \uc124\uc815\ud574 \uc8fc\uc5b4\uc57c \ud558\ub294 \ubd80\ubd84\uc774 \ucd94\uac00\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("p",null,"Ubuntu OS\uae4c\uc9c0\ub294 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud0d1\uc744 k8s cluster\ub85c \uad6c\ucd95\ud55c \ub4a4, \uc678\ubd80 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc811\uadfc\ud558\ub294 \uac83\uc744 \ud655\uc778\ud558\ub294 \uac83\uae4c\uc9c0\uac00 \ubcf8 ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes"),"\ub2e8\uc6d0\uc758 \ubc94\uc704\uc785\ub2c8\ub2e4."),(0,s.kt)("p",null,"\uc790\uc138\ud55c \uad6c\ucd95 \ubc29\ubc95\uc740 3\uac00\uc9c0 \ub3c4\uad6c\ub9c8\ub2e4 \ub2e4\ub974\uae30\uc5d0 \ub2e4\uc74c\uacfc \uac19\uc740 \ud750\ub984\uc73c\ub85c \uad6c\uc131\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uac01\uac01\uc758 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. 
\ubc18\ub4dc\uc2dc \ubaa8\ub4e0 \ub3c4\uad6c\ub97c \uc0ac\uc6a9\ud574 \ubcfc \ud544\uc694\ub294 \uc5c6\uc73c\uba70, \uc774 \uc911 \uc5ec\ub7ec\ubd84\uc774 \uc775\uc219\ud558\uc2e0 \ub3c4\uad6c\ub97c \ud65c\uc6a9\ud574\uc8fc\uc2dc\uba74 \ucda9\ubd84\ud569\ub2c8\ub2e4."))}b.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c07e8a62.232aef8c.js b/assets/js/c07e8a62.b219d7bb.js similarity index 99% rename from assets/js/c07e8a62.232aef8c.js rename to assets/js/c07e8a62.b219d7bb.js index 01d11fae..75a53a5d 100644 --- a/assets/js/c07e8a62.232aef8c.js +++ b/assets/js/c07e8a62.b219d7bb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1800],{3905:(e,t,r)=>{r.d(t,{Zo:()=>k,kt:()=>s});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),c=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},k=function(e){var t=c(e.components);return n.createElement(p.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),m=c(r),u=a,s=m["".concat(p,".").concat(u)]||m[u]||d[u]||l;return r?n.createElement(s,o(o({ref:t},k),{},{components:r})):n.createElement(s,o({ref:t},k))}));function s(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[m]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const l={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/images",id:"version-1.0/prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/docs/1.0/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/images.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/docs/1.0/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/docs/1.0/prerequisites/docker/advanced"}},p={},c=[{value:"1. 
Dockerfile \ub9cc\ub4e4\uae30",id:"1-dockerfile-\ub9cc\ub4e4\uae30",level:2},{value:"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4",id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4",level:2},{value:"FROM",id:"from",level:3},{value:"COPY",id:"copy",level:3},{value:"RUN",id:"run",level:3},{value:"CMD",id:"cmd",level:3},{value:"WORKDIR",id:"workdir",level:3},{value:"ENV",id:"env",level:3},{value:"EXPOSE",id:"expose",level:3},{value:"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30",id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30",level:2},{value:"4. Docker build from Dockerfile",id:"4-docker-build-from-dockerfile",level:2},{value:"5. Docker run from Dockerfile",id:"5-docker-run-from-dockerfile",level:2},{value:"6. Docker run with env",id:"6-docker-run-with-env",level:2}],k={toc:c},m="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},k,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-dockerfile-\ub9cc\ub4e4\uae30"},"1. Dockerfile \ub9cc\ub4e4\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 \uac00\uc7a5 \uc26c\uc6b4 \ubc29\ubc95\uc740 \ub3c4\ucee4\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf\uc778 Dockerfile\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc678\uc5d0\ub294 running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker commit")," \ub4f1\uc744 \ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\uc790\uac00 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc27d\uac8c \ub9cc\ub4e4 \uc218 \uc788\ub3c4\ub85d, \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf"),(0,a.kt)("li",{parentName:"ul"},"\ud30c\uc77c\uba85\uc740 \uaf2d ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc774 \uc544\ub2c8\uc5b4\ub3c4 \uc0c1\uad00\uc5c6\uc9c0\ub9cc, ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \uc218\ud589 \uc2dc, default \ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc77c\uba85\uc774 ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ul"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \ub97c \uc218\ud589\ud560 \ub54c, ",(0,a.kt)("inlineCode",{parentName:"li"},"-f")," \uc635\uc158\uc744 \uc8fc\uba74 \ub2e4\ub978 \ud30c\uc77c\uba85\uc73c\ub85c\ub3c4 \uc0ac\uc6a9 \uac00\ub2a5\ud569\ub2c8\ub2e4.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"ex) ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build -f dockerfile-asdf .")," \ub3c4 \uac00\ub2a5")))))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc2e4\uc2b5\uc744 \uc704\ud574\uc11c \ud3b8\ud55c \ub514\ub809\ud1a0\ub9ac\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd \n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ub77c\ub294 \uc774\ub984\uc758 \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ud3f4\ub354\ub85c 
\uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Dockerfile \uc774\ub77c\ub294 \ube48 \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"touch Dockerfile\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")))),(0,a.kt)("h2",{id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4"},"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4"),(0,a.kt)("p",null,"Dockerfile \uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8\uc801\uc778 \uba85\ub839\uc5b4\uc5d0 \ub300\ud574\uc11c \ud558\ub098\uc529 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"from"},"FROM"),(0,a.kt)("p",null,"Dockerfile \uc774 base image \ub85c \uc5b4\ub5a0\ud55c \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c, \uc544\ubb34\uac83\ub3c4 \uc5c6\ub294 \ube48 \ud658\uacbd\uc5d0\uc11c\ubd80\ud130 \ud558\ub098\ud558\ub098\uc529 \uc81c\uac00 \uc758\ub3c4\ud55c \ud658\uacbd\uc744 \ub9cc\ub4e4\uc5b4\uac00\ub294\uac8c \uc544\ub2c8\ub77c, python 3.9 \ubc84\uc804\uc774 \uc124\uce58\ub41c \ud658\uacbd\uc744 \ubca0\uc774\uc2a4\ub85c\ud574\ub450\uace0, \uc800\ub294 pytorch \ub97c \uc124\uce58\ud558\uace0, \uc81c \uc18c\uc2a4\ucf54\ub4dc\ub9cc \ub123\uc5b4\ub450\ub294 \ud615\ud0dc\ub85c \ud65c\uc6a9\ud560 \uc218\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \uacbd\uc6b0\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", ... \ub4f1\uc758 \uc798 \ub9cc\ub4e4\uc5b4\uc9c4 \uc774\ubbf8\uc9c0\ub97c \ubca0\uc774\uc2a4\ub85c \ud65c\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,a.kt)("h3",{id:"copy"},"COPY"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"host(\ub85c\uceec)\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc758 \ud30c\uc77c \ud639\uc740 \ub514\ub809\ud1a0\ub9ac\ub97c ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc5d0 \ubcf5\uc0ac\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ADD")," \ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uc640 \ube44\uc2b7\ud558\uc9c0\ub9cc \ucd94\uac00\uc801\uc778 \uae30\ub2a5\uc744 \ud488\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,a.kt)("h3",{id:"run"},"RUN"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \ud574\ub2f9 \ucee4\ub9e8\ub4dc\ub4e4\uc774 \uc2e4\ud589\ub41c \uc0c1\ud0dc\ub97c \uc720\uc9c0\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,a.kt)("h3",{id:"cmd"},"CMD"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 ",(0,a.kt)("strong",{parentName:"p"},"\uc2dc\uc791\ub420 \ub54c"),", \uc2e4\ud589\ud558\ub294 \uac83\uc744 \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud558\ub294 \uba85\ub839\uc5b4\ub85c ",(0,a.kt)("strong",{parentName:"p"},"ENTRYPOINT")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub458\uc758 \ucc28\uc774\uc5d0 \ub300\ud574\uc11c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ub4a4\uc5d0\uc11c")," \ub2e4\ub8f9\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\ub098\uc758 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc5d0\uc11c\ub294 \ud558\ub098\uc758 ",(0,a.kt)("strong",{parentName:"p"},"CMD")," \ub9cc \uc2e4\ud589\ud560 \uc218 \uc788\ub2e4\ub294 \uc810\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"RUN")," \uba85\ub839\uc5b4\uc640 \ub2e4\ub985\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,a.kt)("h3",{id:"workdir"},"WORKDIR"),(0,a.kt)("p",null,"\uc774\ud6c4 \ucd94\uac00\ub420 \uba85\ub839\uc5b4\ub97c \ucee8\ud14c\uc774\ub108 \ub0b4\uc758 \uc5b4\ub5a4 \ub514\ub809\ud1a0\ub9ac\uc5d0\uc11c \uc218\ud589\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud574\ub2f9 \ub514\ub809\ud1a0\ub9ac\uac00 \uc5c6\ub2e4\uba74 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,a.kt)("h3",{id:"env"},"ENV"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc9c0\uc18d\uc801\uc73c\ub85c \uc0ac\uc6a9\ub420 environment variable \uc758 \uac12\uc744 \uc124\uc815\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,a.kt)("h3",{id:"expose"},"EXPOSE"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub6ab\uc5b4\uc904 \ud3ec\ud2b8/\ud504\ub85c\ud1a0\ucf5c\uc744 \uc9c0\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("inlineCode",{parentName:"p"},"")," \uc744 \uc9c0\uc815\ud558\uc9c0 \uc54a\uc73c\uba74 TCP \uac00 \ub514\ud3f4\ud2b8\ub85c \uc124\uc815\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,a.kt)("h2",{id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30"},"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," \ud639\uc740 vscode \ub4f1 \ubcf8\uc778\uc774 \uc0ac\uc6a9\ud558\ub294 \ud3b8\uc9d1\uae30\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,a.kt)("h2",{id:"4-docker-build-from-dockerfile"},"4. 
Docker build from Dockerfile"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker build")," \uba85\ub839\uc5b4\ub85c Dockerfile \ub85c\ubd80\ud130 Docker Image \ub97c \ub9cc\ub4e4\uc5b4\ubd05\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,a.kt)("p",null,"Dockerfile \uc774 \uc788\ub294 \uacbd\ub85c\uc5d0\uc11c \ub2e4\uc74c \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,a.kt)("p",null,"\uc704 \ucee4\ub9e8\ub4dc\ub97c \uc124\uba85\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},".")," : ",(0,a.kt)("strong",{parentName:"li"},"\ud604\uc7ac \uacbd\ub85c"),"\uc5d0 \uc788\ub294 Dockerfile \ub85c\ubd80\ud130"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"-t")," : my-image \ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ub984"),"\uacfc v1.0.0 \uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\ud0dc\uadf8"),"\ub85c ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ubbf8\uc9c0"),"\ub97c"),(0,a.kt)("li",{parentName:"ul"},"\ube4c\ub4dc\ud558\uaca0\ub2e4\ub77c\ub294 \uba85\ub839\uc5b4")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc774\ubbf8\uc9c0 \ube4c\ub4dc\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,a.kt)("h2",{id:"5-docker-run-from-dockerfile"},"5. Docker run from Dockerfile"),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub85c docker \ucee8\ud14c\uc774\ub108\ub97c ",(0,a.kt)("strong",{parentName:"p"},"run")," \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,a.kt)("h2",{id:"6-docker-run-with-env"},"6. 
Docker run with env"),(0,a.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub97c \uc2e4\ud589\ud558\ub294 \uc2dc\uc810\uc5d0, ",(0,a.kt)("inlineCode",{parentName:"p"},"TEST")," env var \uc758 \uac12\uc744 \ubcc0\uacbd\ud558\uc5ec docker \ucee8\ud14c\uc774\ub108\ub97c run \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1800],{3905:(e,t,r)=>{r.d(t,{Zo:()=>k,kt:()=>s});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var p=n.createContext({}),c=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},k=function(e){var t=c(e.components);return n.createElement(p.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},u=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),m=c(r),u=a,s=m["".concat(p,".").concat(u)]||m[u]||d[u]||l;return r?n.createElement(s,o(o({ref:t},k),{},{components:r})):n.createElement(s,o({ref:t},k))}));function s(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[m]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const l={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/images",id:"version-1.0/prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/docs/1.0/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/images.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker 
command",permalink:"/docs/1.0/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/docs/1.0/prerequisites/docker/advanced"}},p={},c=[{value:"1. Dockerfile \ub9cc\ub4e4\uae30",id:"1-dockerfile-\ub9cc\ub4e4\uae30",level:2},{value:"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4",id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4",level:2},{value:"FROM",id:"from",level:3},{value:"COPY",id:"copy",level:3},{value:"RUN",id:"run",level:3},{value:"CMD",id:"cmd",level:3},{value:"WORKDIR",id:"workdir",level:3},{value:"ENV",id:"env",level:3},{value:"EXPOSE",id:"expose",level:3},{value:"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30",id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30",level:2},{value:"4. Docker build from Dockerfile",id:"4-docker-build-from-dockerfile",level:2},{value:"5. Docker run from Dockerfile",id:"5-docker-run-from-dockerfile",level:2},{value:"6. Docker run with env",id:"6-docker-run-with-env",level:2}],k={toc:c},m="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},k,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-dockerfile-\ub9cc\ub4e4\uae30"},"1. Dockerfile \ub9cc\ub4e4\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 \uac00\uc7a5 \uc26c\uc6b4 \ubc29\ubc95\uc740 \ub3c4\ucee4\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf\uc778 Dockerfile\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc678\uc5d0\ub294 running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker commit")," \ub4f1\uc744 \ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\uc790\uac00 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \uc27d\uac8c \ub9cc\ub4e4 \uc218 \uc788\ub3c4\ub85d, \uc81c\uacf5\ud558\ub294 \ud15c\ud50c\ub9bf"),(0,a.kt)("li",{parentName:"ul"},"\ud30c\uc77c\uba85\uc740 \uaf2d ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc774 \uc544\ub2c8\uc5b4\ub3c4 \uc0c1\uad00\uc5c6\uc9c0\ub9cc, ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \uc218\ud589 \uc2dc, default \ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc77c\uba85\uc774 ",(0,a.kt)("inlineCode",{parentName:"li"},"Dockerfile")," \uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ul"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4dc\ub294 ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build")," \ub97c \uc218\ud589\ud560 \ub54c, ",(0,a.kt)("inlineCode",{parentName:"li"},"-f")," \uc635\uc158\uc744 \uc8fc\uba74 \ub2e4\ub978 \ud30c\uc77c\uba85\uc73c\ub85c\ub3c4 \uc0ac\uc6a9 \uac00\ub2a5\ud569\ub2c8\ub2e4.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"ex) ",(0,a.kt)("inlineCode",{parentName:"li"},"docker build -f dockerfile-asdf .")," \ub3c4 \uac00\ub2a5")))))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc2e4\uc2b5\uc744 \uc704\ud574\uc11c \ud3b8\ud55c \ub514\ub809\ud1a0\ub9ac\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd \n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ub77c\ub294 \uc774\ub984\uc758 \ud3f4\ub354\ub97c 
\uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"docker-practice \ud3f4\ub354\ub85c \uc774\ub3d9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cd docker-practice\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Dockerfile \uc774\ub77c\ub294 \ube48 \ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"touch Dockerfile\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc815\uc0c1\uc801\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")))),(0,a.kt)("h2",{id:"2-dockerfile-\ub0b4\uc7a5-\uba85\ub839\uc5b4"},"2. Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4"),(0,a.kt)("p",null,"Dockerfile \uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ubcf8\uc801\uc778 \uba85\ub839\uc5b4\uc5d0 \ub300\ud574\uc11c \ud558\ub098\uc529 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"from"},"FROM"),(0,a.kt)("p",null,"Dockerfile \uc774 base image \ub85c \uc5b4\ub5a0\ud55c \uc774\ubbf8\uc9c0\ub97c \uc0ac\uc6a9\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c, \uc544\ubb34\uac83\ub3c4 \uc5c6\ub294 \ube48 \ud658\uacbd\uc5d0\uc11c\ubd80\ud130 \ud558\ub098\ud558\ub098\uc529 \uc81c\uac00 \uc758\ub3c4\ud55c \ud658\uacbd\uc744 \ub9cc\ub4e4\uc5b4\uac00\ub294\uac8c \uc544\ub2c8\ub77c, python 3.9 \ubc84\uc804\uc774 \uc124\uce58\ub41c \ud658\uacbd\uc744 \ubca0\uc774\uc2a4\ub85c\ud574\ub450\uace0, \uc800\ub294 pytorch \ub97c \uc124\uce58\ud558\uace0, \uc81c \uc18c\uc2a4\ucf54\ub4dc\ub9cc \ub123\uc5b4\ub450\ub294 \ud615\ud0dc\ub85c \ud65c\uc6a9\ud560 \uc218\uac00 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \uacbd\uc6b0\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", ... \ub4f1\uc758 \uc798 \ub9cc\ub4e4\uc5b4\uc9c4 \uc774\ubbf8\uc9c0\ub97c \ubca0\uc774\uc2a4\ub85c \ud65c\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,a.kt)("h3",{id:"copy"},"COPY"),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"host(\ub85c\uceec)\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc758 \ud30c\uc77c \ud639\uc740 \ub514\ub809\ud1a0\ub9ac\ub97c ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c\uc758 ",(0,a.kt)("inlineCode",{parentName:"strong"},""))," \uacbd\ub85c\uc5d0 \ubcf5\uc0ac\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ADD")," \ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uc640 \ube44\uc2b7\ud558\uc9c0\ub9cc \ucd94\uac00\uc801\uc778 \uae30\ub2a5\uc744 \ud488\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,a.kt)("h3",{id:"run"},"RUN"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc2e4\ud589\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \ud574\ub2f9 \ucee4\ub9e8\ub4dc\ub4e4\uc774 \uc2e4\ud589\ub41c \uc0c1\ud0dc\ub97c \uc720\uc9c0\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,a.kt)("h3",{id:"cmd"},"CMD"),(0,a.kt)("p",null,"\uba85\uc2dc\ud55c \ucee4\ub9e8\ub4dc\ub97c \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 ",(0,a.kt)("strong",{parentName:"p"},"\uc2dc\uc791\ub420 \ub54c"),", \uc2e4\ud589\ud558\ub294 \uac83\uc744 \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud558\ub294 \uba85\ub839\uc5b4\ub85c ",(0,a.kt)("strong",{parentName:"p"},"ENTRYPOINT")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub458\uc758 \ucc28\uc774\uc5d0 \ub300\ud574\uc11c\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\ub4a4\uc5d0\uc11c")," \ub2e4\ub8f9\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\ub098\uc758 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc5d0\uc11c\ub294 \ud558\ub098\uc758 ",(0,a.kt)("strong",{parentName:"p"},"CMD")," \ub9cc \uc2e4\ud589\ud560 \uc218 \uc788\ub2e4\ub294 \uc810\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"RUN")," \uba85\ub839\uc5b4\uc640 \ub2e4\ub985\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,a.kt)("h3",{id:"workdir"},"WORKDIR"),(0,a.kt)("p",null,"\uc774\ud6c4 \ucd94\uac00\ub420 \uba85\ub839\uc5b4\ub97c \ucee8\ud14c\uc774\ub108 \ub0b4\uc758 \uc5b4\ub5a4 \ub514\ub809\ud1a0\ub9ac\uc5d0\uc11c \uc218\ud589\ud560 \uac83\uc778\uc9c0\ub97c \uba85\uc2dc\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud574\ub2f9 \ub514\ub809\ud1a0\ub9ac\uac00 \uc5c6\ub2e4\uba74 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,a.kt)("h3",{id:"env"},"ENV"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc9c0\uc18d\uc801\uc73c\ub85c \uc0ac\uc6a9\ub420 environment variable \uc758 \uac12\uc744 \uc124\uc815\ud558\ub294 \uba85\ub839\uc5b4\uc785\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,a.kt)("h3",{id:"expose"},"EXPOSE"),(0,a.kt)("p",null,"\ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub6ab\uc5b4\uc904 \ud3ec\ud2b8/\ud504\ub85c\ud1a0\ucf5c\uc744 \uc9c0\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("inlineCode",{parentName:"p"},"")," \uc744 \uc9c0\uc815\ud558\uc9c0 \uc54a\uc73c\uba74 TCP \uac00 \ub514\ud3f4\ud2b8\ub85c \uc124\uc815\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,a.kt)("h2",{id:"3-\uac04\ub2e8\ud55c-dockerfile-\uc791\uc131\ud574\ubcf4\uae30"},"3. \uac04\ub2e8\ud55c Dockerfile \uc791\uc131\ud574\ubcf4\uae30"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," \ud639\uc740 vscode \ub4f1 \ubcf8\uc778\uc774 \uc0ac\uc6a9\ud558\ub294 \ud3b8\uc9d1\uae30\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud574\uc90d\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,a.kt)("h2",{id:"4-docker-build-from-dockerfile"},"4. 
Docker build from Dockerfile"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker build")," \uba85\ub839\uc5b4\ub85c Dockerfile \ub85c\ubd80\ud130 Docker Image \ub97c \ub9cc\ub4e4\uc5b4\ubd05\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,a.kt)("p",null,"Dockerfile \uc774 \uc788\ub294 \uacbd\ub85c\uc5d0\uc11c \ub2e4\uc74c \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,a.kt)("p",null,"\uc704 \ucee4\ub9e8\ub4dc\ub97c \uc124\uba85\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},".")," : ",(0,a.kt)("strong",{parentName:"li"},"\ud604\uc7ac \uacbd\ub85c"),"\uc5d0 \uc788\ub294 Dockerfile \ub85c\ubd80\ud130"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"-t")," : my-image \ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ub984"),"\uacfc v1.0.0 \uc774\ub77c\ub294 ",(0,a.kt)("strong",{parentName:"li"},"\ud0dc\uadf8"),"\ub85c ",(0,a.kt)("strong",{parentName:"li"},"\uc774\ubbf8\uc9c0"),"\ub97c"),(0,a.kt)("li",{parentName:"ul"},"\ube4c\ub4dc\ud558\uaca0\ub2e4\ub77c\ub294 \uba85\ub839\uc5b4")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc774\ubbf8\uc9c0 \ube4c\ub4dc\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,a.kt)("h2",{id:"5-docker-run-from-dockerfile"},"5. Docker run from Dockerfile"),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub85c docker \ucee8\ud14c\uc774\ub108\ub97c ",(0,a.kt)("strong",{parentName:"p"},"run")," \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,a.kt)("h2",{id:"6-docker-run-with-env"},"6. 
Docker run with env"),(0,a.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 \ubc29\uae08 \ube4c\ub4dc\ud55c ",(0,a.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," \uc774\ubbf8\uc9c0\ub97c \uc2e4\ud589\ud558\ub294 \uc2dc\uc810\uc5d0, ",(0,a.kt)("inlineCode",{parentName:"p"},"TEST")," env var \uc758 \uac12\uc744 \ubcc0\uacbd\ud558\uc5ec docker \ucee8\ud14c\uc774\ub108\ub97c run \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub41c\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c1115317.fc7a8656.js b/assets/js/c1115317.64871be4.js similarity index 99% rename from assets/js/c1115317.fc7a8656.js rename to assets/js/c1115317.64871be4.js index 3b7ef034..3b1dab4c 100644 --- a/assets/js/c1115317.fc7a8656.js +++ b/assets/js/c1115317.64871be4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6639],{3905:(e,t,n)=>{n.d(t,{Zo:()=>k,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),c=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},k=function(e){var t=c(e.components);return r.createElement(p.Provider,{value:t},e.children)},d="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),d=c(n),u=a,m=d["".concat(p,".").concat(u)]||d[u]||s[u]||l;return n?r.createElement(m,o(o({ref:t},k),{},{components:n})):r.createElement(m,o({ref:t},k))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>s,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var r=n(7462),a=(n(7294),n(3905));const l={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/advanced",id:"version-1.0/prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced 
way.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/docs/1.0/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/advanced.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/docs/1.0/prerequisites/docker/images"}},p={},c=[{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30",level:2},{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Docker tag \uc774\ub984 \uc9d3\uae30",id:"docker-tag-\uc774\ub984-\uc9d3\uae30",level:3},{value:"ETC",id:"etc",level:3},{value:"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158",id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158",level:2},{value:"docker run with volume",id:"docker-run-with-volume",level:3},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"docker run as a background process",id:"docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],k={toc:c},d="wrapper";function s(e){let{components:t,...l}=e;return(0,a.kt)(d,(0,r.Z)({},k,l,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30"),(0,a.kt)("h3",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810"),(0,a.kt)("p",null,"Dockerfile \uc744 \ud65c\uc6a9\ud558\uc5ec \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c\ub294 \uba85\ub839\uc5b4\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\uc21c\uc11c"),"\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \uc5ec\ub7ec \uac1c\uc758 Read-Only Layer \ub85c \uad6c\uc131\ub418\uc5b4\uc788\uace0, \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud560 \ub54c \uc774\ubbf8 \uc874\uc7ac\ud558\ub294 \ub808\uc774\uc5b4\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uce90\uc2dc\ub418\uc5b4")," \uc7ac\uc0ac\uc6a9\ub418\uae30 \ub54c\ubb38\uc5d0, \uc774\ub97c \uc0dd\uac01\ud574\uc11c Dockerfile \uc744 \uad6c\uc131\ud55c\ub2e4\uba74 ",(0,a.kt)("strong",{parentName:"p"},"\ube4c\ub4dc \uc2dc\uac04\uc744 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"Dockerfile\uc5d0\uc11c 
",(0,a.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uba85\ub839\uc5b4 \ud558\ub098\uac00 \ud558\ub098\uc758 \ub808\uc774\uc5b4\ub85c \uc800\uc7a5\ub429\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\ub85c \ube4c\ub4dc\ub41c \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash")," \uba85\ub839\uc5b4\ub85c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ub808\uc774\uc5b4\ub85c \ud45c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"layers.png",src:n(3116).Z,width:"1080",height:"612"})),(0,a.kt)("p",null,"\ucd5c\uc0c1\ub2e8\uc758 R/W Layer \ub294 \uc774\ubbf8\uc9c0\uc5d0 \uc601\ud5a5\uc744 \uc8fc\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ub0b4\uc5ed\uc740 \ubaa8\ub450 \ud718\ubc1c\uc131\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub2e8\uc758 \ub808\uc774\uc5b4\uac00 \ubcc0\uacbd\ub418\uba74, \uadf8 \uc704\uc758 \ub808\uc774\uc5b4\ub294 \ubaa8\ub450 \uc0c8\ub85c \ube4c\ub4dc\ub429\ub2c8\ub2e4. \uadf8\ub798\uc11c Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4\uc758 \uc21c\uc11c\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uba74, ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc \ubcc0\uacbd"),"\ub418\ub294 \ubd80\ubd84\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ucd5c\ub300\ud55c \ub4a4\ucabd\uc73c\ub85c")," \uc815\ub82c\ud558\ub294 \uac83\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. (ex. 
",(0,a.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,a.kt)("p",null,"\uadf8\ub807\uae30 \ub54c\ubb38\uc5d0 \ubc18\ub300\ub85c \ubcc0\uacbd\ub418\uc9c0 \uc54a\ub294 \ubd80\ubd84\uc740 \ucd5c\ub300\ud55c \uc55e\ucabd\uc73c\ub85c \uc815\ub82c\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub9cc\uc57d \uac70\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\ubcc0\uacbd\ub418\uc9c0 \uc54a\uc9c0\ub9cc"),", \uc5ec\ub7ec \uacf3\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc")," \uc4f0\uc774\ub294 \ubd80\ubd84\uc744 \uacf5\ud1b5\ud654\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud574\ub2f9 \uacf5\ud1b5\ubd80\ubd84\ub9cc \ubb36\uc5b4\uc11c \ubcc4\ub3c4\uc758 \uc774\ubbf8\uc9c0\ub294 \ubbf8\ub9ac \ub9cc\ub4e4\uc5b4\ub454 \ub2e4\uc74c, ",(0,a.kt)("strong",{parentName:"p"},"\ubca0\uc774\uc2a4 \uc774\ubbf8\uc9c0")," \ub85c \ud65c\uc6a9\ud558\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\ub978 \uac74 \uac70\uc758 \ub611\uac19\uc740\ub370, tensorflow-cpu \ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\ubbf8\uc9c0\uc640, tensorflow-gpu \ub97c \uc0ac\uc6a9\ud558\ub294 \ud658\uacbd\uc744 \ubd84\ub9ac\ud574\uc11c \uc774\ubbf8\uc9c0\ub85c \ub9cc\ub4e4\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","python \uacfc \uae30\ud0c0 \uae30\ubcf8\uc801\uc778 \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub41c ",(0,a.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,a.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," \ub97c \ub9cc\ub4e4\uc5b4\ub450\uace0, ",(0,a.kt)("strong",{parentName:"p"},"tensorflow cpu \ubc84\uc804\uacfc gpu \ubc84\uc804\uc774")," \uc124\uce58\ub41c \uc774\ubbf8\uc9c0 \uc0c8\ub85c \ub9cc\ub4e4\ub54c\ub294, \uc704\uc758 \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"FROM")," \uc73c\ub85c \ubd88\ub7ec\uc628 \ub2e4\uc74c, tensorflow install \ud558\ub294 \ubd80\ubd84\ub9cc \ubcc4\ub3c4\ub85c \uc791\uc131\ud574\uc11c Dockerfile \uc744 2 \uac1c\ub85c \uad00\ub9ac\ud55c\ub2e4\uba74 \uac00\ub3c5\uc131\ub3c4 \uc88b\uace0 \ube4c\ub4dc \uc2dc\uac04\ub3c4 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"\ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \ud569\uce58\ub294 \uac83"),"\uc774 Old version \uc758 \ub3c4\ucee4\uc5d0\uc11c\ub294 \uc131\ub2a5 \ud5a5\uc0c1 \ud6a8\uacfc\ub97c \uc774\ub04c\uc5c8\uc2b5\ub2c8\ub2e4. 
\uc5ec\ub7ec\ubd84\uc758 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 \uc5b4\ub5a4 \ub3c4\ucee4 \ubc84\uc804\uc5d0\uc11c \uc2e4\ud589\ub420 \uac83\uc778\uc9c0 \ubcf4\uc7a5\ud560 \uc218 \uc5c6\uc73c\uba70, ",(0,a.kt)("strong",{parentName:"p"},"\uac00\ub3c5\uc131"),"\uc744 \uc704\ud574\uc11c\ub3c4 \ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \uc801\uc808\ud788 \ud569\uce58\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ub41c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,a.kt)("p",null,"\uc774\ub97c \uc544\ub798\uc640 \uac19\uc774 \ud569\uccd0\uc11c \uc801\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,a.kt)("p",null,"\ud3b8\uc758\ub97c \uc704\ud574\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore")," \ub3c4 \uc0ac\uc6a9\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".gitignore")," \uc640 \ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud55c\ub2e4\uace0 \uc774\ud574\ud558\uba74 \ub429\ub2c8\ub2e4. (git add \ud560 \ub54c \uc81c\uc678\ud560 \uc218 \uc788\ub4ef\uc774, docker build \ud560 \ub54c \uc790\ub3d9\uc73c\ub85c \uc81c\uc678)"),(0,a.kt)("p",null,"\ub354 \ub9ce\uc740 \uc815\ubcf4\ub294 ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker \uacf5\uc2dd \ubb38\uc11c"),"\uc5d0\uc11c \ud655\uc778\ud558\uc2e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \ub294 \ubaa8\ub450 \ucee8\ud14c\uc774\ub108\uc758 \uc2e4\ud589 \uc2dc\uc810\uc5d0\uc11c \uc5b4\ub5a4 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\uc2dc\ud0a4\uace0 \uc2f6\uc744 \ub54c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \uc774 \ub458 \uc911 \ud558\ub098\ub294 \ubc18\ub4dc\uc2dc \uc874\uc7ac\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"\ucc28\uc774\uc810"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD"),": docker run \uc744 \uc218\ud589\ud560 \ub54c, \uc27d\uac8c \ubcc0\uacbd\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc74c"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": ",(0,a.kt)("inlineCode",{parentName:"li"},"--entrypoint")," \ub97c \uc0ac\uc6a9\ud574\uc57c \ubcc0\uacbd\ud560 \uc218 \uc788\uc74c")))),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \uac00 \ud568\uaed8 \uc4f0\uc77c \ub54c\ub294 \ubcf4\ud1b5 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc5d0\uc11c \uc801\uc740 \uba85\ub839\uc758 arguments(parameters) \ub97c 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc5d0\uc11c \uc8fc\uc11d\uc73c\ub85c \ud45c\uc2dc\ub41c \ubd80\ubd84\ub4e4\uc744 \ud574\uc81c\ud558\uba70 \ube4c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null}),(0,a.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"NO CMD")),(0,a.kt)("td",{parentName:"tr",align:null},"Error!"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,a.kt)("td",{parentName:"tr",align:null},"x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD x y")),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"In Kubernetes pod",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," \u2192 command"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD")," \u2192 args")))),(0,a.kt)("h3",{id:"docker-tag-\uc774\ub984-\uc9d3\uae30"},"Docker tag \uc774\ub984 \uc9d3\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc758 tag \ub85c ",(0,a.kt)("strong",{parentName:"p"},"latest \ub294 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5"),"\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc720\ub294 latest \ub294 default tag name \uc774\ubbc0\ub85c ",(0,a.kt)("strong",{parentName:"p"},"\uc758\ub3c4\uce58 \uc54a\uac8c overwritten")," \ub418\ub294 \uacbd\uc6b0\uac00 \ub108\ubb34 \ub9ce\uc774 \ubc1c\uc0dd\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub098\uc758 \uc774\ubbf8\uc9c0\ub294 \ud558\ub098\uc758 
\ud0dc\uadf8\ub97c \uac00\uc9d0(",(0,a.kt)("strong",{parentName:"p"},"uniqueness"),")\uc744 \ubcf4\uc7a5\ud574\uc57c \ucd94\ud6c4 Production \ub2e8\uacc4\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\ud611\uc5c5/\ub514\ubc84\uae45"),"\uc5d0 \uc6a9\uc774\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub0b4\uc6a9\uc740 \ub2e4\ub974\uc9c0\ub9cc, \ub3d9\uc77c\ud55c tag \ub97c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 \ucd94\ud6c4 dangling image \ub85c \ucde8\uae09\ub418\uc5b4 \uad00\ub9ac\ud558\uae30 \uc5b4\ub824\uc6cc\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","dangling image\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker images"),"\uc5d0\ub294 \ub098\uc624\uc9c0 \uc54a\uc9c0\ub9cc \uacc4\uc18d\ud574\uc11c \uc800\uc7a5\uc18c\ub97c \ucc28\uc9c0\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"etc"},"ETC"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"log \ub4f1\uc758 \uc815\ubcf4\ub294 container \ub0b4\ubd80\uac00 \uc544\ub2cc \uacf3\uc5d0 \ub530\ub85c \uc800\uc7a5\ud569\ub2c8\ub2e4.\ncontainer \ub0b4\ubd80\uc5d0\uc11c write \ud55c data \ub294 \uc5b8\uc81c\ub4e0\uc9c0 \uc0ac\ub77c\uc9c8 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"secret \ud55c \uc815\ubcf4, \ud658\uacbd(dev/prod) dependent \ud55c \uc815\ubcf4 \ub4f1\uc740 Dockerfile \uc5d0 \uc9c1\uc811 \uc801\ub294 \uac8c \uc544\ub2c8\ub77c, env var \ub610\ub294 .env config file \uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"Dockerfile ",(0,a.kt)("strong",{parentName:"li"},"linter")," \ub3c4 \uc874\uc7ac\ud558\ubbc0\ub85c, \ud611\uc5c5 \uc2dc\uc5d0\ub294 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,a.kt)("h2",{id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158"},"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158"),(0,a.kt)("h3",{id:"docker-run-with-volume"},"docker run with volume"),(0,a.kt)("p",null,"Docker container \uc0ac\uc6a9 \uc2dc \ubd88\ud3b8\ud55c \uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.\n\ubc14\ub85c Docker\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ubaa8\ub4e0 \uc0ac\ud56d\uc740 \uc800\uc7a5\ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc774\uc720\ub294 Docker container \ub294 \uac01\uac01 \uaca9\ub9ac\ub41c \ud30c\uc77c\uc2dc\uc2a4\ud15c\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. 
\ub530\ub77c\uc11c, ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec docker container \ub07c\ub9ac \ub370\uc774\ud130\ub97c \uacf5\uc720\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c Docker\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"2 \uac00\uc9c0"),"\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"storage.png",src:n(7229).Z,width:"501",height:"255"})),(0,a.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"docker cli \ub97c \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"li"},"volume")," \uc774\ub77c\ub294 \ub9ac\uc18c\uc2a4\ub97c \uc9c1\uc811 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"host \uc5d0\uc11c Docker area(",(0,a.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") \uc544\ub798\uc5d0 \ud2b9\uc815 \ub514\ub809\ud1a0\ub9ac\ub97c \uc0dd\uc131\ud55c \ub2e4\uc74c, \ud574\ub2f9 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"host \uc758 \ud2b9\uc815 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"how-to-use"},"How to use?"),(0,a.kt)("p",null,"\uc0ac\uc6a9 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ub3d9\uc77c\ud55c \uc778\ud130\ud398\uc774\uc2a4"),"\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"-v")," \uc635\uc158\uc744 \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc, volume \uc744 \uc0ac\uc6a9\ud560 \ub54c\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume rm")," \ub4f1\uc744 \uc218\ud589\ud558\uc5ec \uc9c1\uc811 \uad00\ub9ac\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Docker volume"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Blind mount"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,a.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uac1c\ubc1c\ud560 \ub54c\ub294 bind mount \uac00 \ud3b8\ud558\uae34 \ud558\uc9c0\ub9cc, \ud658\uacbd\uc744 \uae54\ub054\ud558\uac8c \uc720\uc9c0\ud558\uace0 \uc2f6\ub2e4\uba74 docker volume \uc744 \uc0ac\uc6a9\ud558\uc5ec create, rm \uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\ub3c4 \ud558\ub098\uc758 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\ub3c4 \uacb0\uad6d docker \uc758 bind mount \ub97c \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-resource-limit"},"docker run with resource limit"),(0,a.kt)("p",null,"\uae30\ubcf8\uc801\uc73c\ub85c docker container \ub294 ",(0,a.kt)("strong",{parentName:"p"},"host OS \uc758 cpu, memory \uc790\uc6d0\uc744 fully \uc0ac\uc6a9"),"\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc774\ub807\uac8c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 host OS \uc758 \uc790\uc6d0 \uc0c1\ud669\uc5d0 \ub530\ub77c\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OOM")," \ub4f1\uc758 \uc774\uc288\ub85c docker container \uac00 \ube44\uc815\uc0c1\uc801\uc73c\ub85c \uc885\ub8cc\ub418\ub294 \uc0c1\ud669\uc774 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7f0 \ubb38\uc81c\ub97c \ub2e4\ub8e8\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"docker container \uc2e4\ud589 \uc2dc, cpu \uc640 memory \uc758 \uc0ac\uc6a9\ub7c9 \uc81c\ud55c"),"\uc744 \uac78 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"\uc635\uc158"),"\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,a.kt)("p",null,"\uc704\uc758 \ub3c4\ucee4\ub97c \uc2e4\ud589 \ud6c4 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker stats")," \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \uc0ac\uc6a9\ub7c9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c pod \ub77c\ub294 \ub9ac\uc18c\uc2a4\uc5d0 cpu, memory \uc81c\ud55c\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,a.kt)("p",null,"\ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uac00 \uacc4\uc18d\ud574\uc11c running \uc0c1\ud0dc\ub97c \uc720\uc9c0\uc2dc\ucf1c\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4. 
\uc774\ub7f0 \uacbd\uc6b0\ub97c \uc704\ud574\uc11c \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc885\ub8cc\ub418\uc790\ub9c8\uc790 \ubc14\ub85c \uc7ac\uc0dd\uc131\uc744 \uc2dc\ub3c4\ud560 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"--restart=always")," \uc635\uc158\uc744 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc635\uc158 \uc785\ub825 \ud6c4 \ub3c4\ucee4\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps"),"\ub97c \ud1b5\ud574 \uc7ac\uc2e4\ud589\uc774 \ub418\uace0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.\n\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uace0 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 STATUS\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"on-failure with max retries"),(0,a.kt)("li",{parentName:"ul"},"always \ub4f1\uc758 \uc120\ud0dd\uc9c0 \uc81c\uacf5")))),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c job \uc774\ub77c\ub294 resource \uc758 restart \uc635\uc158\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-as-a-background-process"},"docker run as a background process"),(0,a.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud560 \ub54c\ub294 \uae30\ubcf8\uc801\uc73c\ub85c foreground process \ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud55c \ud130\ubbf8\ub110\uc774 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0 \uc790\ub3d9\uc73c\ub85c attach \ub418\uc5b4 \uc788\uc5b4, \ub2e4\ub978 \uba85\ub839\uc744 \uc2e4\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc608\uc2dc\ub97c \uc218\ud589\ud574\ubd05\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ud130\ubbf8\ub110 2 \uac1c\ub97c \uc5f4\uc5b4, \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker ps")," \ub97c \uc9c0\ucf1c\ubcf4\uace0, \ub2e4\ub978 \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc744 \ucc28\ub840\ub85c \uc2e4\ud589\ud574\ubcf4\uba70 \ub3d9\uc791\uc744 \uc9c0\ucf1c\ubd05\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"first-practice"},"First Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"10 \ucd08\ub3d9\uc548 \uba48\ucdb0 \uc788\uc5b4\uc57c \ud558\uace0, \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
10\ucd08 \ub4a4\uc5d0\ub294 docker ps \uc5d0\uc11c container \uac00 \uc885\ub8cc\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"second-practice"},"Second Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"\uc774\ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + q")),(0,a.kt)("p",null,"\ud574\ub2f9 \ud130\ubbf8\ub110\uc5d0\uc11c \uc774\uc81c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc788\uac8c \ub418\uc5c8\uc73c\uba70, docker ps \ub85c\ub3c4 10\ucd08\uae4c\uc9c0\ub294 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc0b4\uc544\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub807\uac8c docker container \ub0b4\ubd80\uc5d0\uc11c \ube60\uc838\ub098\uc628 \uc0c1\ud669\uc744 detached \ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4.\n\ub3c4\ucee4\uc5d0\uc11c\ub294 run \uc744 \uc2e4\ud589\ud568\uacfc \ub3d9\uc2dc\uc5d0 detached mode \ub85c \uc2e4\ud589\uc2dc\ud0ac \uc218 \uc788\ub294 \uc635\uc158\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"third-practice"},"Third Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,a.kt)("p",null,"detached mode \uc774\ubbc0\ub85c \ud574\ub2f9 \uba85\ub839\uc744 \uc2e4\ud589\uc2dc\ud0a8 \ud130\ubbf8\ub110\uc5d0\uc11c \ub2e4\ub978 \uc561\uc158\uc744 \uc218\ud589\uc2dc\ud0ac \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc0c1\ud669\uc5d0 \ub530\ub77c detached mode \ub97c \uc801\uc808\ud788 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4, DB \uc640 \ud1b5\uc2e0\ud558\ub294 Backend API server \ub97c \uac1c\ubc1c\ud560 \ub54c Backend API server \ub294 source code \ub97c \ubcc0\uacbd\uc2dc\ucf1c\uac00\uba74\uc11c hot-loading \uc73c\ub85c \uacc4\uc18d\ud574\uc11c \ub85c\uadf8\ub97c \ud655\uc778\ud574\ubd10\uc57c \ud558\uc9c0\ub9cc, DB \ub294 \ub85c\uadf8\ub97c \uc9c0\ucf1c\ubcfc \ud544\uc694\ub294 \uc5c6\ub294 \uacbd\uc6b0\ub77c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","DB \ub294 docker container \ub97c detached mode \ub85c \uc2e4\ud589\uc2dc\ud0a4\uace0, Backend API server \ub294 attached mode \ub85c log \ub97c following \ud558\uba74\uc11c \uc2e4\ud589\uc2dc\ud0a4\uba74 
\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}s.isMDXComponent=!0},3116:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},7229:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6639],{3905:(e,t,n)=>{n.d(t,{Zo:()=>k,kt:()=>m});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function o(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),c=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):o(o({},t),e)),n},k=function(e){var t=c(e.components);return r.createElement(p.Provider,{value:t},e.children)},d="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},u=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,p=e.parentName,k=i(e,["components","mdxType","originalType","parentName"]),d=c(n),u=a,m=d["".concat(p,".").concat(u)]||d[u]||s[u]||l;return n?r.createElement(m,o(o({ref:t},k),{},{components:n})):r.createElement(m,o({ref:t},k))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,o=new Array(l);o[0]=u;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>o,default:()=>s,frontMatter:()=>l,metadata:()=>i,toc:()=>c});var r=n(7462),a=(n(7294),n(3905));const l={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/advanced",id:"version-1.0/prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced 
way.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/docs/1.0/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/advanced.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/docs/1.0/prerequisites/docker/images"}},p={},c=[{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30",level:2},{value:"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810",id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Docker tag \uc774\ub984 \uc9d3\uae30",id:"docker-tag-\uc774\ub984-\uc9d3\uae30",level:3},{value:"ETC",id:"etc",level:3},{value:"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158",id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158",level:2},{value:"docker run with volume",id:"docker-run-with-volume",level:3},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"docker run as a background process",id:"docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],k={toc:c},d="wrapper";function s(e){let{components:t,...l}=e;return(0,a.kt)(d,(0,r.Z)({},k,l,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0-\uc798-\ub9cc\ub4e4\uae30"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0 \uc798 \ub9cc\ub4e4\uae30"),(0,a.kt)("h3",{id:"\ub3c4\ucee4-\uc774\ubbf8\uc9c0\ub97c-\ub9cc\ub4e4-\ub54c-\uace0\ub824\ud574\uc57c-\ub420-\uc810"},"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c \uace0\ub824\ud574\uc57c \ub420 \uc810"),(0,a.kt)("p",null,"Dockerfile \uc744 \ud65c\uc6a9\ud558\uc5ec \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \ub54c\ub294 \uba85\ub839\uc5b4\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\uc21c\uc11c"),"\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8 \uc774\uc720\ub294 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub294 \uc5ec\ub7ec \uac1c\uc758 Read-Only Layer \ub85c \uad6c\uc131\ub418\uc5b4\uc788\uace0, \uc774\ubbf8\uc9c0\ub97c \ube4c\ub4dc\ud560 \ub54c \uc774\ubbf8 \uc874\uc7ac\ud558\ub294 \ub808\uc774\uc5b4\ub294 ",(0,a.kt)("strong",{parentName:"p"},"\uce90\uc2dc\ub418\uc5b4")," \uc7ac\uc0ac\uc6a9\ub418\uae30 \ub54c\ubb38\uc5d0, \uc774\ub97c \uc0dd\uac01\ud574\uc11c Dockerfile \uc744 \uad6c\uc131\ud55c\ub2e4\uba74 ",(0,a.kt)("strong",{parentName:"p"},"\ube4c\ub4dc \uc2dc\uac04\uc744 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"Dockerfile\uc5d0\uc11c 
",(0,a.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"COPY")," \uba85\ub839\uc5b4 \ud558\ub098\uac00 \ud558\ub098\uc758 \ub808\uc774\uc5b4\ub85c \uc800\uc7a5\ub429\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\ub85c \ube4c\ub4dc\ub41c \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash")," \uba85\ub839\uc5b4\ub85c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ub808\uc774\uc5b4\ub85c \ud45c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"layers.png",src:n(3116).Z,width:"1080",height:"612"})),(0,a.kt)("p",null,"\ucd5c\uc0c1\ub2e8\uc758 R/W Layer \ub294 \uc774\ubbf8\uc9c0\uc5d0 \uc601\ud5a5\uc744 \uc8fc\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ub0b4\uc5ed\uc740 \ubaa8\ub450 \ud718\ubc1c\uc131\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub2e8\uc758 \ub808\uc774\uc5b4\uac00 \ubcc0\uacbd\ub418\uba74, \uadf8 \uc704\uc758 \ub808\uc774\uc5b4\ub294 \ubaa8\ub450 \uc0c8\ub85c \ube4c\ub4dc\ub429\ub2c8\ub2e4. \uadf8\ub798\uc11c Dockerfile \ub0b4\uc7a5 \uba85\ub839\uc5b4\uc758 \uc21c\uc11c\uac00 \uc911\uc694\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uba74, ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc \ubcc0\uacbd"),"\ub418\ub294 \ubd80\ubd84\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ucd5c\ub300\ud55c \ub4a4\ucabd\uc73c\ub85c")," \uc815\ub82c\ud558\ub294 \uac83\uc744 \ucd94\ucc9c\ud569\ub2c8\ub2e4. (ex. 
",(0,a.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,a.kt)("p",null,"\uadf8\ub807\uae30 \ub54c\ubb38\uc5d0 \ubc18\ub300\ub85c \ubcc0\uacbd\ub418\uc9c0 \uc54a\ub294 \ubd80\ubd84\uc740 \ucd5c\ub300\ud55c \uc55e\ucabd\uc73c\ub85c \uc815\ub82c\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub9cc\uc57d \uac70\uc758 ",(0,a.kt)("strong",{parentName:"p"},"\ubcc0\uacbd\ub418\uc9c0 \uc54a\uc9c0\ub9cc"),", \uc5ec\ub7ec \uacf3\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\uc790\uc8fc")," \uc4f0\uc774\ub294 \ubd80\ubd84\uc744 \uacf5\ud1b5\ud654\ud560 \uc218\ub3c4 \uc788\uc2b5\ub2c8\ub2e4.\n\ud574\ub2f9 \uacf5\ud1b5\ubd80\ubd84\ub9cc \ubb36\uc5b4\uc11c \ubcc4\ub3c4\uc758 \uc774\ubbf8\uc9c0\ub294 \ubbf8\ub9ac \ub9cc\ub4e4\uc5b4\ub454 \ub2e4\uc74c, ",(0,a.kt)("strong",{parentName:"p"},"\ubca0\uc774\uc2a4 \uc774\ubbf8\uc9c0")," \ub85c \ud65c\uc6a9\ud558\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, \ub2e4\ub978 \uac74 \uac70\uc758 \ub611\uac19\uc740\ub370, tensorflow-cpu \ub97c \uc0ac\uc6a9\ud558\ub294 \uc774\ubbf8\uc9c0\uc640, tensorflow-gpu \ub97c \uc0ac\uc6a9\ud558\ub294 \ud658\uacbd\uc744 \ubd84\ub9ac\ud574\uc11c \uc774\ubbf8\uc9c0\ub85c \ub9cc\ub4e4\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","python \uacfc \uae30\ud0c0 \uae30\ubcf8\uc801\uc778 \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub41c ",(0,a.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,a.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," \ub97c \ub9cc\ub4e4\uc5b4\ub450\uace0, ",(0,a.kt)("strong",{parentName:"p"},"tensorflow cpu \ubc84\uc804\uacfc gpu \ubc84\uc804\uc774")," \uc124\uce58\ub41c \uc774\ubbf8\uc9c0 \uc0c8\ub85c \ub9cc\ub4e4\ub54c\ub294, \uc704\uc758 \uc774\ubbf8\uc9c0\ub97c ",(0,a.kt)("inlineCode",{parentName:"p"},"FROM")," \uc73c\ub85c \ubd88\ub7ec\uc628 \ub2e4\uc74c, tensorflow install \ud558\ub294 \ubd80\ubd84\ub9cc \ubcc4\ub3c4\ub85c \uc791\uc131\ud574\uc11c Dockerfile \uc744 2 \uac1c\ub85c \uad00\ub9ac\ud55c\ub2e4\uba74 \uac00\ub3c5\uc131\ub3c4 \uc88b\uace0 \ube4c\ub4dc \uc2dc\uac04\ub3c4 \uc904\uc77c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("strong",{parentName:"p"},"\ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \ud569\uce58\ub294 \uac83"),"\uc774 Old version \uc758 \ub3c4\ucee4\uc5d0\uc11c\ub294 \uc131\ub2a5 \ud5a5\uc0c1 \ud6a8\uacfc\ub97c \uc774\ub04c\uc5c8\uc2b5\ub2c8\ub2e4. 
\uc5ec\ub7ec\ubd84\uc758 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\uac00 \uc5b4\ub5a4 \ub3c4\ucee4 \ubc84\uc804\uc5d0\uc11c \uc2e4\ud589\ub420 \uac83\uc778\uc9c0 \ubcf4\uc7a5\ud560 \uc218 \uc5c6\uc73c\uba70, ",(0,a.kt)("strong",{parentName:"p"},"\uac00\ub3c5\uc131"),"\uc744 \uc704\ud574\uc11c\ub3c4 \ud569\uce60 \uc218 \uc788\ub294 Layer \ub294 \uc801\uc808\ud788 \ud569\uce58\ub294 \uac83\uc774 \uc88b\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ub41c ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,a.kt)("p",null,"\uc774\ub97c \uc544\ub798\uc640 \uac19\uc774 \ud569\uccd0\uc11c \uc801\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,a.kt)("p",null,"\ud3b8\uc758\ub97c \uc704\ud574\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore")," \ub3c4 \uc0ac\uc6a9\ud558\ub294\uac8c \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("inlineCode",{parentName:"p"},".dockerignore"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},".gitignore")," \uc640 \ube44\uc2b7\ud55c \uc5ed\ud560\uc744 \ud55c\ub2e4\uace0 \uc774\ud574\ud558\uba74 \ub429\ub2c8\ub2e4. (git add \ud560 \ub54c \uc81c\uc678\ud560 \uc218 \uc788\ub4ef\uc774, docker build \ud560 \ub54c \uc790\ub3d9\uc73c\ub85c \uc81c\uc678)"),(0,a.kt)("p",null,"\ub354 \ub9ce\uc740 \uc815\ubcf4\ub294 ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker \uacf5\uc2dd \ubb38\uc11c"),"\uc5d0\uc11c \ud655\uc778\ud558\uc2e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \ub294 \ubaa8\ub450 \ucee8\ud14c\uc774\ub108\uc758 \uc2e4\ud589 \uc2dc\uc810\uc5d0\uc11c \uc5b4\ub5a4 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\uc2dc\ud0a4\uace0 \uc2f6\uc744 \ub54c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \uc774 \ub458 \uc911 \ud558\ub098\ub294 \ubc18\ub4dc\uc2dc \uc874\uc7ac\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"\ucc28\uc774\uc810"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD"),": docker run \uc744 \uc218\ud589\ud560 \ub54c, \uc27d\uac8c \ubcc0\uacbd\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc74c"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": ",(0,a.kt)("inlineCode",{parentName:"li"},"--entrypoint")," \ub97c \uc0ac\uc6a9\ud574\uc57c \ubcc0\uacbd\ud560 \uc218 \uc788\uc74c")))),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc640 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD")," \uac00 \ud568\uaed8 \uc4f0\uc77c \ub54c\ub294 \ubcf4\ud1b5 ",(0,a.kt)("inlineCode",{parentName:"p"},"CMD"),"\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," \uc5d0\uc11c \uc801\uc740 \uba85\ub839\uc758 arguments(parameters) \ub97c 
\uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile")," \uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,a.kt)("p",null,"\uc704\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"Dockerfile"),"\uc5d0\uc11c \uc8fc\uc11d\uc73c\ub85c \ud45c\uc2dc\ub41c \ubd80\ubd84\ub4e4\uc744 \ud574\uc81c\ud558\uba70 \ube4c\ub4dc\ud558\uace0 \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \uc5bb\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null}),(0,a.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,a.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"NO CMD")),(0,a.kt)("td",{parentName:"tr",align:null},"Error!"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,a.kt)("td",{parentName:"tr",align:null},"x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},(0,a.kt)("strong",{parentName:"td"},"CMD x y")),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,a.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,a.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"In Kubernetes pod",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," \u2192 command"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"CMD")," \u2192 args")))),(0,a.kt)("h3",{id:"docker-tag-\uc774\ub984-\uc9d3\uae30"},"Docker tag \uc774\ub984 \uc9d3\uae30"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc774\ubbf8\uc9c0\uc758 tag \ub85c ",(0,a.kt)("strong",{parentName:"p"},"latest \ub294 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\ub294 \uac83\uc744 \uad8c\uc7a5"),"\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\uc720\ub294 latest \ub294 default tag name \uc774\ubbc0\ub85c ",(0,a.kt)("strong",{parentName:"p"},"\uc758\ub3c4\uce58 \uc54a\uac8c overwritten")," \ub418\ub294 \uacbd\uc6b0\uac00 \ub108\ubb34 \ub9ce\uc774 \ubc1c\uc0dd\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ud558\ub098\uc758 \uc774\ubbf8\uc9c0\ub294 \ud558\ub098\uc758 
\ud0dc\uadf8\ub97c \uac00\uc9d0(",(0,a.kt)("strong",{parentName:"p"},"uniqueness"),")\uc744 \ubcf4\uc7a5\ud574\uc57c \ucd94\ud6c4 Production \ub2e8\uacc4\uc5d0\uc11c ",(0,a.kt)("strong",{parentName:"p"},"\ud611\uc5c5/\ub514\ubc84\uae45"),"\uc5d0 \uc6a9\uc774\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub0b4\uc6a9\uc740 \ub2e4\ub974\uc9c0\ub9cc, \ub3d9\uc77c\ud55c tag \ub97c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 \ucd94\ud6c4 dangling image \ub85c \ucde8\uae09\ub418\uc5b4 \uad00\ub9ac\ud558\uae30 \uc5b4\ub824\uc6cc\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","dangling image\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker images"),"\uc5d0\ub294 \ub098\uc624\uc9c0 \uc54a\uc9c0\ub9cc \uacc4\uc18d\ud574\uc11c \uc800\uc7a5\uc18c\ub97c \ucc28\uc9c0\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"etc"},"ETC"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"log \ub4f1\uc758 \uc815\ubcf4\ub294 container \ub0b4\ubd80\uac00 \uc544\ub2cc \uacf3\uc5d0 \ub530\ub85c \uc800\uc7a5\ud569\ub2c8\ub2e4.\ncontainer \ub0b4\ubd80\uc5d0\uc11c write \ud55c data \ub294 \uc5b8\uc81c\ub4e0\uc9c0 \uc0ac\ub77c\uc9c8 \uc218 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"secret \ud55c \uc815\ubcf4, \ud658\uacbd(dev/prod) dependent \ud55c \uc815\ubcf4 \ub4f1\uc740 Dockerfile \uc5d0 \uc9c1\uc811 \uc801\ub294 \uac8c \uc544\ub2c8\ub77c, env var \ub610\ub294 .env config file \uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("li",{parentName:"ol"},"Dockerfile ",(0,a.kt)("strong",{parentName:"li"},"linter")," \ub3c4 \uc874\uc7ac\ud558\ubbc0\ub85c, \ud611\uc5c5 \uc2dc\uc5d0\ub294 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.\n",(0,a.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,a.kt)("h2",{id:"docker-run-\uc758-\ub2e4\uc591\ud55c-\uc635\uc158"},"docker run \uc758 \ub2e4\uc591\ud55c \uc635\uc158"),(0,a.kt)("h3",{id:"docker-run-with-volume"},"docker run with volume"),(0,a.kt)("p",null,"Docker container \uc0ac\uc6a9 \uc2dc \ubd88\ud3b8\ud55c \uc810\uc774 \uc788\uc2b5\ub2c8\ub2e4.\n\ubc14\ub85c Docker\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker ",(0,a.kt)("strong",{parentName:"p"},"container \ub0b4\ubd80\uc5d0\uc11c \uc791\uc5c5\ud55c \ubaa8\ub4e0 \uc0ac\ud56d\uc740 \uc800\uc7a5\ub418\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4."),"\n\uc774\uc720\ub294 Docker container \ub294 \uac01\uac01 \uaca9\ub9ac\ub41c \ud30c\uc77c\uc2dc\uc2a4\ud15c\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. 
\ub530\ub77c\uc11c, ",(0,a.kt)("strong",{parentName:"p"},"\uc5ec\ub7ec docker container \ub07c\ub9ac \ub370\uc774\ud130\ub97c \uacf5\uc720\ud558\uae30 \uc5b4\ub835\uc2b5\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\uc774 \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c Docker\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"2 \uac00\uc9c0"),"\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"storage.png",src:n(7229).Z,width:"501",height:"255"})),(0,a.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"docker cli \ub97c \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"li"},"volume")," \uc774\ub77c\ub294 \ub9ac\uc18c\uc2a4\ub97c \uc9c1\uc811 \uad00\ub9ac"),(0,a.kt)("li",{parentName:"ul"},"host \uc5d0\uc11c Docker area(",(0,a.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") \uc544\ub798\uc5d0 \ud2b9\uc815 \ub514\ub809\ud1a0\ub9ac\ub97c \uc0dd\uc131\ud55c \ub2e4\uc74c, \ud574\ub2f9 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"host \uc758 \ud2b9\uc815 \uacbd\ub85c\ub97c docker container \uc5d0 mount")),(0,a.kt)("h4",{id:"how-to-use"},"How to use?"),(0,a.kt)("p",null,"\uc0ac\uc6a9 \ubc29\uc2dd\uc740 ",(0,a.kt)("strong",{parentName:"p"},"\ub3d9\uc77c\ud55c \uc778\ud130\ud398\uc774\uc2a4"),"\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"-v")," \uc635\uc158\uc744 \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\ub9cc, volume \uc744 \uc0ac\uc6a9\ud560 \ub54c\uc5d0\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"docker volume rm")," \ub4f1\uc744 \uc218\ud589\ud558\uc5ec \uc9c1\uc811 \uad00\ub9ac\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Docker volume"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"Blind mount"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,a.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uac1c\ubc1c\ud560 \ub54c\ub294 bind mount \uac00 \ud3b8\ud558\uae34 \ud558\uc9c0\ub9cc, \ud658\uacbd\uc744 \uae54\ub054\ud558\uac8c \uc720\uc9c0\ud558\uace0 \uc2f6\ub2e4\uba74 docker volume \uc744 \uc0ac\uc6a9\ud558\uc5ec create, rm \uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc218\ud589\ud558\ub294 \uac83\ub3c4 \ud558\ub098\uc758 \ubc29\ubc95\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c \uc2a4\ud1a0\ub9ac\uc9c0\ub97c \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\ub3c4 \uacb0\uad6d docker \uc758 bind mount \ub97c \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-resource-limit"},"docker run with resource limit"),(0,a.kt)("p",null,"\uae30\ubcf8\uc801\uc73c\ub85c docker container \ub294 ",(0,a.kt)("strong",{parentName:"p"},"host OS \uc758 cpu, memory \uc790\uc6d0\uc744 fully \uc0ac\uc6a9"),"\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \uc774\ub807\uac8c \uc0ac\uc6a9\ud558\uac8c \ub418\uba74 host OS \uc758 \uc790\uc6d0 \uc0c1\ud669\uc5d0 \ub530\ub77c\uc11c ",(0,a.kt)("strong",{parentName:"p"},"OOM")," \ub4f1\uc758 \uc774\uc288\ub85c docker container \uac00 \ube44\uc815\uc0c1\uc801\uc73c\ub85c \uc885\ub8cc\ub418\ub294 \uc0c1\ud669\uc774 \ubc1c\uc0dd\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub7f0 \ubb38\uc81c\ub97c \ub2e4\ub8e8\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"docker container \uc2e4\ud589 \uc2dc, cpu \uc640 memory \uc758 \uc0ac\uc6a9\ub7c9 \uc81c\ud55c"),"\uc744 \uac78 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,a.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"\uc635\uc158"),"\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,a.kt)("p",null,"\uc704\uc758 \ub3c4\ucee4\ub97c \uc2e4\ud589 \ud6c4 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker stats")," \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \uc0ac\uc6a9\ub7c9\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c pod \ub77c\ub294 \ub9ac\uc18c\uc2a4\uc5d0 cpu, memory \uc81c\ud55c\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,a.kt)("p",null,"\ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uac00 \uacc4\uc18d\ud574\uc11c running \uc0c1\ud0dc\ub97c \uc720\uc9c0\uc2dc\ucf1c\uc57c \ud558\ub294 \uacbd\uc6b0\uac00 \uc874\uc7ac\ud569\ub2c8\ub2e4. 
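The example above constrains memory only; `docker run` can also cap CPU usage with the `--cpus` flag. A hedged sketch follows — the container name `cpu-mem-limit` is just an illustrative choice, not something from the original text.

```bash
# Limit the container to 1.5 CPUs and 512 MiB of memory
docker run -d --cpus="1.5" -m 512m --name cpu-mem-limit ubuntu sleep 3600

# Check the applied limits once, without streaming
docker stats --no-stream cpu-mem-limit

# Clean up
docker rm -f cpu-mem-limit
```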
\uc774\ub7f0 \uacbd\uc6b0\ub97c \uc704\ud574\uc11c \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc885\ub8cc\ub418\uc790\ub9c8\uc790 \ubc14\ub85c \uc7ac\uc0dd\uc131\uc744 \uc2dc\ub3c4\ud560 \uc218 \uc788\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"--restart=always")," \uc635\uc158\uc744 \uc81c\uacf5\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc635\uc158 \uc785\ub825 \ud6c4 \ub3c4\ucee4\ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps"),"\ub97c \ud1b5\ud574 \uc7ac\uc2e4\ud589\uc774 \ub418\uace0 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.\n\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uace0 \uc788\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 STATUS\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," \uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"on-failure with max retries"),(0,a.kt)("li",{parentName:"ul"},"always \ub4f1\uc758 \uc120\ud0dd\uc9c0 \uc81c\uacf5")))),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0\uc11c job \uc774\ub77c\ub294 resource \uc758 restart \uc635\uc158\uc744 \uc904 \ub54c, \uc774 \ubc29\uc2dd\uc744 \ud65c\uc6a9\ud558\uc5ec \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"docker-run-as-a-background-process"},"docker run as a background process"),(0,a.kt)("p",null,"\ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud560 \ub54c\ub294 \uae30\ubcf8\uc801\uc73c\ub85c foreground process \ub85c \uc2e4\ud589\ub429\ub2c8\ub2e4. \uc989, \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud55c \ud130\ubbf8\ub110\uc774 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0 \uc790\ub3d9\uc73c\ub85c attach \ub418\uc5b4 \uc788\uc5b4, \ub2e4\ub978 \uba85\ub839\uc744 \uc2e4\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc608\uc2dc\ub97c \uc218\ud589\ud574\ubd05\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc6b0\uc120 \ud130\ubbf8\ub110 2 \uac1c\ub97c \uc5f4\uc5b4, \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"docker ps")," \ub97c \uc9c0\ucf1c\ubcf4\uace0, \ub2e4\ub978 \ud558\ub098\uc758 \ud130\ubbf8\ub110\uc5d0\uc11c\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uba85\ub839\uc744 \ucc28\ub840\ub85c \uc2e4\ud589\ud574\ubcf4\uba70 \ub3d9\uc791\uc744 \uc9c0\ucf1c\ubd05\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"first-practice"},"First Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"10 \ucd08\ub3d9\uc548 \uba48\ucdb0 \uc788\uc5b4\uc57c \ud558\uace0, \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uc5d0\uc11c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
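Besides `--restart=always`, the restart policies listed above include `on-failure` with a maximum retry count. A small sketch, assuming an illustrative container name `retry-demo`:

```bash
# Restart at most 3 times, and only when the container exits with a non-zero code
docker run -d --restart=on-failure:3 --name retry-demo ubuntu false

# After the retries are exhausted, check how many times Docker restarted it
docker inspect -f '{{.RestartCount}} {{.State.Status}}' retry-demo

# Clean up
docker rm -f retry-demo
```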
10\ucd08 \ub4a4\uc5d0\ub294 docker ps \uc5d0\uc11c container \uac00 \uc885\ub8cc\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"second-practice"},"Second Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,a.kt)("p",null,"\uc774\ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,a.kt)("inlineCode",{parentName:"p"},"ctrl + q")),(0,a.kt)("p",null,"\ud574\ub2f9 \ud130\ubbf8\ub110\uc5d0\uc11c \uc774\uc81c \ub2e4\ub978 \uba85\ub839\uc744 \uc218\ud589\ud560 \uc218 \uc788\uac8c \ub418\uc5c8\uc73c\uba70, docker ps \ub85c\ub3c4 10\ucd08\uae4c\uc9c0\ub294 \ud574\ub2f9 \ucee8\ud14c\uc774\ub108\uac00 \uc0b4\uc544\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ub807\uac8c docker container \ub0b4\ubd80\uc5d0\uc11c \ube60\uc838\ub098\uc628 \uc0c1\ud669\uc744 detached \ub77c\uace0 \ubd80\ub985\ub2c8\ub2e4.\n\ub3c4\ucee4\uc5d0\uc11c\ub294 run \uc744 \uc2e4\ud589\ud568\uacfc \ub3d9\uc2dc\uc5d0 detached mode \ub85c \uc2e4\ud589\uc2dc\ud0ac \uc218 \uc788\ub294 \uc635\uc158\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,a.kt)("h4",{id:"third-practice"},"Third Practice"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,a.kt)("p",null,"detached mode \uc774\ubbc0\ub85c \ud574\ub2f9 \uba85\ub839\uc744 \uc2e4\ud589\uc2dc\ud0a8 \ud130\ubbf8\ub110\uc5d0\uc11c \ub2e4\ub978 \uc561\uc158\uc744 \uc218\ud589\uc2dc\ud0ac \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc0c1\ud669\uc5d0 \ub530\ub77c detached mode \ub97c \uc801\uc808\ud788 \ud65c\uc6a9\ud558\uba74 \uc88b\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4, DB \uc640 \ud1b5\uc2e0\ud558\ub294 Backend API server \ub97c \uac1c\ubc1c\ud560 \ub54c Backend API server \ub294 source code \ub97c \ubcc0\uacbd\uc2dc\ucf1c\uac00\uba74\uc11c hot-loading \uc73c\ub85c \uacc4\uc18d\ud574\uc11c \ub85c\uadf8\ub97c \ud655\uc778\ud574\ubd10\uc57c \ud558\uc9c0\ub9cc, DB \ub294 \ub85c\uadf8\ub97c \uc9c0\ucf1c\ubcfc \ud544\uc694\ub294 \uc5c6\ub294 \uacbd\uc6b0\ub77c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \uc2e4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","DB \ub294 docker container \ub97c detached mode \ub85c \uc2e4\ud589\uc2dc\ud0a4\uace0, Backend API server \ub294 attached mode \ub85c log \ub97c following \ud558\uba74\uc11c \uc2e4\ud589\uc2dc\ud0a4\uba74 
\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"references"},"References"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}s.isMDXComponent=!0},3116:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},7229:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file diff --git a/assets/js/c1242cde.58087205.js b/assets/js/c1242cde.540cfd76.js similarity index 98% rename from assets/js/c1242cde.58087205.js rename to assets/js/c1242cde.540cfd76.js index c1c8fd00..c309df32 100644 --- a/assets/js/c1242cde.58087205.js +++ b/assets/js/c1242cde.540cfd76.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2508],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),p=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,s=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(r),k=a,m=d["".concat(s,".").concat(k)]||d[k]||u[k]||l;return r?n.createElement(m,o(o({ref:t},c),{},{components:r})):n.createElement(m,o({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var p=2;p{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const l={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/install",id:"version-1.0/prerequisites/docker/install",title:"Install Docker",description:"Install 
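Following the DB/backend example above, one way to combine the two modes is to run the long-lived service detached and attach to its logs only when needed. A sketch using the `nginx:latest` image from earlier; the container name `web` is an illustrative assumption.

```bash
# Start the service in detached mode so the terminal stays free
docker run -d --name web nginx:latest

# Confirm it is running, then follow its logs only when needed
docker ps
docker logs -f web

# Clean up
docker rm -f web
```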
docker to start.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/docs/1.0/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/install.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/docs/1.0/prerequisites/docker/introduction"}},s={},p=[{value:"Docker",id:"docker",level:2},{value:"\uc124\uce58 \ud655\uc778",id:"\uc124\uce58-\ud655\uc778",level:2},{value:"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c..",id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c",level:2}],c={toc:p},d="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(d,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc2e4\uc2b5\uc744 \uc704\ud574 \ub3c4\ucee4\ub97c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc124\uce58\ub294 \uc5b4\ub5a4 OS\ub97c \uc0ac\uc6a9\ud558\ub294\uc9c0\uc5d0 \ub530\ub77c \ub2ec\ub77c\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uac01 \ud658\uacbd\uc5d0 \ub9de\ub294 \ub3c4\ucee4 \uc124\uce58\ub294 \uacf5\uc2dd \ud648\ud398\uc774\uc9c0\ub97c \ucc38\uace0\ud574\uc8fc\uc138\uc694."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"\uc124\uce58-\ud655\uc778"},"\uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294 OS, \ud130\ubbf8\ub110 \ud658\uacbd\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker 
Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c"},"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c.."),(0,a.kt)("p",null,"MLOps\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574 \ud544\uc694\ud55c \ub3c4\ucee4 \uc0ac\uc6a9\ubc95\uc744 \uc124\uba85\ud558\ub2c8 \ub9ce\uc740 \ube44\uc720\uc640 \uc608\uc2dc\uac00 MLOps \ucabd\uc73c\ub85c \uce58\uc911\ub418\uc5b4 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2508],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function l(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function o(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),p=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):o(o({},t),e)),r},c=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,l=e.originalType,s=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=p(r),k=a,m=d["".concat(s,".").concat(k)]||d[k]||u[k]||l;return r?n.createElement(m,o(o({ref:t},c),{},{components:r})):n.createElement(m,o({ref:t},c))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=r.length,o=new Array(l);o[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[d]="string"==typeof e?e:a,o[1]=i;for(var p=2;p{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>i,toc:()=>p});var n=r(7462),a=(r(7294),r(3905));const l={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},o=void 0,i={unversionedId:"prerequisites/docker/install",id:"version-1.0/prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/versioned_docs/version-1.0/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/docs/1.0/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/install.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/docs/1.0/prerequisites/docker/introduction"}},s={},p=[{value:"Docker",id:"docker",level:2},{value:"\uc124\uce58 \ud655\uc778",id:"\uc124\uce58-\ud655\uc778",level:2},{value:"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c..",id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c",level:2}],c={toc:p},d="wrapper";function 
u(e){let{components:t,...r}=e;return(0,a.kt)(d,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"\ub3c4\ucee4 \uc2e4\uc2b5\uc744 \uc704\ud574 \ub3c4\ucee4\ub97c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4 \uc124\uce58\ub294 \uc5b4\ub5a4 OS\ub97c \uc0ac\uc6a9\ud558\ub294\uc9c0\uc5d0 \ub530\ub77c \ub2ec\ub77c\uc9d1\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uac01 \ud658\uacbd\uc5d0 \ub9de\ub294 \ub3c4\ucee4 \uc124\uce58\ub294 \uacf5\uc2dd \ud648\ud398\uc774\uc9c0\ub97c \ucc38\uace0\ud574\uc8fc\uc138\uc694."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"\uc124\uce58-\ud655\uc778"},"\uc124\uce58 \ud655\uc778"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," \uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294 OS, \ud130\ubbf8\ub110 \ud658\uacbd\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"\ub4e4\uc5b4\uac00\uae30-\uc55e\uc11c\uc11c"},"\ub4e4\uc5b4\uac00\uae30 \uc55e\uc11c\uc11c.."),(0,a.kt)("p",null,"MLOps\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574 \ud544\uc694\ud55c \ub3c4\ucee4 \uc0ac\uc6a9\ubc95\uc744 \uc124\uba85\ud558\ub2c8 \ub9ce\uc740 \ube44\uc720\uc640 \uc608\uc2dc\uac00 MLOps \ucabd\uc73c\ub85c \uce58\uc911\ub418\uc5b4 \uc788\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c37072e4.06249857.js b/assets/js/c37072e4.88bc509b.js similarity index 98% rename from assets/js/c37072e4.06249857.js rename to assets/js/c37072e4.88bc509b.js index e9dc8db5..5952bcc4 100644 --- a/assets/js/c37072e4.06249857.js +++ b/assets/js/c37072e4.88bc509b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9325],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,u=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=o(n),f=i,m=c["".concat(u,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var u in t)hasOwnProperty.call(t,u)&&(p[u]=t[u]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>o});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"version-1.0/kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/docs/1.0/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/1.0/kubeflow/advanced-component"}},u={},o=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Create Run \uc120\ud0dd",id:"1-create-run-\uc120\ud0dd",level:3},{value:"2. Experiment \uc120\ud0dd",id:"2-experiment-\uc120\ud0dd",level:3},{value:"3. Pipeline Config \uc785\ub825",id:"3-pipeline-config-\uc785\ub825",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:o},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"\uc774\uc81c \uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\uc2dc\ucf1c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. 
Create Experiment"),(0,i.kt)("p",null,"Experiment\ub780 Kubeflow \uc5d0\uc11c \uc2e4\ud589\ub418\ub294 Run\uc744 \ub17c\ub9ac\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ub2e8\uc704\uc785\ub2c8\ub2e4. "),(0,i.kt)("p",null,"Kubeflow\uc5d0\uc11c namespace\ub97c \ucc98\uc74c \ub4e4\uc5b4\uc624\uba74 \uc0dd\uc131\ub418\uc5b4 \uc788\ub294 Experiment\uac00 \uc5c6\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc804\uc5d0 \ubbf8\ub9ac Experiment\ub97c \uc0dd\uc131\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. Experiment\uc774 \uc788\ub2e4\uba74 ",(0,i.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"\uc73c\ub85c \ub118\uc5b4\uac00\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"Experiment\ub294 Create Experiment \ubc84\ud2bc\uc744 \ud1b5\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(5997).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,"Experiment\ub85c \uc0ac\uc6a9\ud560 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.\n",(0,i.kt)("img",{alt:"run-1.png",src:n(7523).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-create-run-\uc120\ud0dd"},"1. Create Run \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(9005).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-experiment-\uc120\ud0dd"},"2. Experiment \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(4048).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(1363).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-pipeline-config-\uc785\ub825"},"3. Pipeline Config \uc785\ub825"),(0,i.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc0dd\uc131\ud560 \ub54c \uc785\ub825\ud55c Config \uac12\ub4e4\uc744 \ucc44\uc6cc \ub123\uc2b5\ub2c8\ub2e4.\n\uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc740 number_1\uacfc number_2\ub97c \uc785\ub825\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(7705).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"\uc785\ub825 \ud6c4 Start \ubc84\ud2bc\uc744 \ub204\ub974\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(576).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"\uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\ub4e4\uc740 Runs \ud0ed\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nRun\uc744 \ud074\ub9ad\ud558\uba74 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uacfc \uad00\ub828\ub41c \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(3297).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4. 
\uc544\uc9c1 \uc2e4\ud589\ub418\uc9c0 \uc54a\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ud68c\uc0c9 \ud45c\uc2dc\ub85c \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(7895).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\uc774 \uc644\ub8cc\ub418\uba74 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc\uac00 \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(2687).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"\uac00\uc7a5 \ub9c8\uc9c0\ub9c9 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ubcf4\uba74 \uc785\ub825\ud55c Config\uc778 3\uacfc 5\uc758 \ud569\uc778 8\uc774 \ucd9c\ub825\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(4786).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},5997:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},7523:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},1363:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},9005:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},7705:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},576:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},3297:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},7895:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},2687:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},4786:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},4048:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9325],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,u=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=o(n),f=i,m=c["".concat(u,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var u in 
t)hasOwnProperty.call(t,u)&&(p[u]=t[u]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>o});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"version-1.0/kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/docs/1.0/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/docs/1.0/kubeflow/advanced-component"}},u={},o=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Create Run \uc120\ud0dd",id:"1-create-run-\uc120\ud0dd",level:3},{value:"2. Experiment \uc120\ud0dd",id:"2-experiment-\uc120\ud0dd",level:3},{value:"3. Pipeline Config \uc785\ub825",id:"3-pipeline-config-\uc785\ub825",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:o},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"\uc774\uc81c \uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\uc2dc\ucf1c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiment\ub780 Kubeflow \uc5d0\uc11c \uc2e4\ud589\ub418\ub294 Run\uc744 \ub17c\ub9ac\uc801\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \ub2e8\uc704\uc785\ub2c8\ub2e4. "),(0,i.kt)("p",null,"Kubeflow\uc5d0\uc11c namespace\ub97c \ucc98\uc74c \ub4e4\uc5b4\uc624\uba74 \uc0dd\uc131\ub418\uc5b4 \uc788\ub294 Experiment\uac00 \uc5c6\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc804\uc5d0 \ubbf8\ub9ac Experiment\ub97c \uc0dd\uc131\ud574\ub450\uc5b4\uc57c \ud569\ub2c8\ub2e4. Experiment\uc774 \uc788\ub2e4\uba74 ",(0,i.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"\uc73c\ub85c \ub118\uc5b4\uac00\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,"Experiment\ub294 Create Experiment \ubc84\ud2bc\uc744 \ud1b5\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(5997).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. 
Name \uc785\ub825"),(0,i.kt)("p",null,"Experiment\ub85c \uc0ac\uc6a9\ud560 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.\n",(0,i.kt)("img",{alt:"run-1.png",src:n(7523).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-create-run-\uc120\ud0dd"},"1. Create Run \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(9005).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-experiment-\uc120\ud0dd"},"2. Experiment \uc120\ud0dd"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(4048).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(1363).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-pipeline-config-\uc785\ub825"},"3. Pipeline Config \uc785\ub825"),(0,i.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc0dd\uc131\ud560 \ub54c \uc785\ub825\ud55c Config \uac12\ub4e4\uc744 \ucc44\uc6cc \ub123\uc2b5\ub2c8\ub2e4.\n\uc5c5\ub85c\ub4dc\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc740 number_1\uacfc number_2\ub97c \uc785\ub825\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(7705).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"\uc785\ub825 \ud6c4 Start \ubc84\ud2bc\uc744 \ub204\ub974\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc774 \uc2e4\ud589\ub429\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(576).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"\uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\ub4e4\uc740 Runs \ud0ed\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\nRun\uc744 \ud074\ub9ad\ud558\uba74 \uc2e4\ud589\ub41c \ud30c\uc774\ud504\ub77c\uc778\uacfc \uad00\ub828\ub41c \uc790\uc138\ud55c \ub0b4\uc6a9\uc744 \ud655\uc778\ud574 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(3297).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4. 
\uc544\uc9c1 \uc2e4\ud589\ub418\uc9c0 \uc54a\uc740 \ucef4\ud3ec\ub10c\ud2b8\ub294 \ud68c\uc0c9 \ud45c\uc2dc\ub85c \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(7895).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc2e4\ud589\uc774 \uc644\ub8cc\ub418\uba74 \ucd08\ub85d\uc0c9 \uccb4\ud06c \ud45c\uc2dc\uac00 \ub098\uc635\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(2687).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"\uac00\uc7a5 \ub9c8\uc9c0\ub9c9 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ubcf4\uba74 \uc785\ub825\ud55c Config\uc778 3\uacfc 5\uc758 \ud569\uc778 8\uc774 \ucd9c\ub825\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(4786).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},5997:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},7523:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},1363:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},9005:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},7705:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},576:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},3297:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},7895:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},2687:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},4786:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},4048:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file diff --git a/assets/js/c430b8da.4b2e9285.js b/assets/js/c430b8da.9598db3e.js similarity index 99% rename from assets/js/c430b8da.4b2e9285.js rename to assets/js/c430b8da.9598db3e.js index 9b3f92cc..7b0c844e 100644 --- a/assets/js/c430b8da.4b2e9285.js +++ b/assets/js/c430b8da.9598db3e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3684],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>y});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var 
t=e.components,l=e.mdxType,i=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=l,y=u["".concat(s,".").concat(c)]||u[c]||h[c]||i;return t?a.createElement(y,r(r({ref:n},v),{},{components:t})):a.createElement(y,r({ref:n},v))}));function y(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var i=t.length,r=new Array(i);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:l,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const i={title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"version-1.0/appendix/pyenv",title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",description:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",source:"@site/versioned_docs/version-1.0/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/docs/1.0/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/pyenv.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/docs/1.0/api-deployment/seldon-children"},next:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/1.0/appendix/metallb"}},s={},p=[{value:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd",level:2},{value:"pyenv \uc124\uce58",id:"pyenv-\uc124\uce58",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"\uc124\uce58 - macOS",id:"\uc124\uce58---macos",level:3},{value:"\uc124\uce58 - Ubuntu",id:"\uc124\uce58---ubuntu",level:3},{value:"pyenv \uc0ac\uc6a9",id:"pyenv-\uc0ac\uc6a9",level:2},{value:"Python \ubc84\uc804 \uc124\uce58",id:"python-\ubc84\uc804-\uc124\uce58",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654",id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,l.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd"},"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),(0,l.kt)("p",null,"Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\ub2e4 \ubcf4\uba74 \uc5ec\ub7ec \ubc84\uc804\uc758 Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\ub098, \uc5ec\ub7ec \ud504\ub85c\uc81d\ud2b8\ubcc4 \ud328\ud0a4\uc9c0 \ubc84\uc804\uc744 \ub530\ub85c \uad00\ub9ac\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\ucc98\ub7fc Python \ud658\uacbd \ud639\uc740 Python Package \ud658\uacbd\uc744 \uac00\uc0c1\ud654\ud558\uc5ec \uad00\ub9ac\ud558\ub294 \uac83\uc744 \uc27d\uac8c \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\ub85c\ub294 pyenv, conda, virtualenv, venv \ub4f1\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774 \uc911 
",(0,l.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv"),"\uc640 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),"\ub97c \uc124\uce58\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","pyenv\ub294 Python \ubc84\uc804\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\uba70, pyenv-virtualenv\ub294 pyenv\uc758 plugin\uc73c\ub85c\uc368 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0 \ud658\uacbd\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc90d\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pyenv-\uc124\uce58"},"pyenv \uc124\uce58"),(0,l.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,l.kt)("p",null,"\uc6b4\uc601 \uccb4\uc81c\ubcc4\ub85c Prerequisites\uac00 \ub2e4\ub985\ub2c8\ub2e4. ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"\ub2e4\uc74c \ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc5ec \ud544\uc218 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"\uc124\uce58---macos"},"\uc124\uce58 - macOS"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"macOS\uc758 \uacbd\uc6b0 \uce74\ud0c8\ub9ac\ub098 \ubc84\uc804 \uc774\ud6c4 \uae30\ubcf8 shell\uc774 zsh\ub85c \ubcc0\uacbd\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 zsh\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python 
versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h3",{id:"\uc124\uce58---ubuntu"},"\uc124\uce58 - Ubuntu"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc774 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\n\uc911\ub7b5...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"\uae30\ubcf8 shell\ub85c bash shell\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4.\nbash\uc5d0\uc11c pyenv\uc640 pyenv-virtualenv \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \ubb38\uc790\uc5f4\uc744 \uc785\ub825\ud55c \ud6c4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,l.kt)("p",null,"shell\uc744 restart \ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n 
--version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h2",{id:"pyenv-\uc0ac\uc6a9"},"pyenv \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"python-\ubc84\uc804-\uc124\uce58"},"Python \ubc84\uc804 \uc124\uce58"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv install ")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc744 \uc124\uce58\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc608\uc2dc\ub85c kubeflow\uc5d0\uc11c \uae30\ubcf8\uc73c\ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc774\uc36c 3.7.12 \ubc84\uc804\uc744 \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131"},"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv 
virtualenv <\uac00\uc0c1\ud658\uacbd-\uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc758 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c Python 3.7.12 \ubc84\uc804\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9"},"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv activate <\uac00\uc0c1\ud658\uacbd \uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc704\uc640 \uac19\uc740 \ubc29\uc2dd\uc73c\ub85c \uc0dd\uc131\ud55c \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ud604\uc7ac \uac00\uc0c1\ud658\uacbd\uc758 \uc815\ubcf4\uac00 shell\uc758 \ub9e8 \uc55e\uc5d0 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654"},"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"source deactivate")," \uba85\ub839\uc744 \ud1b5\ud574 \ud604\uc7ac \uc0ac\uc6a9 \uc911\uc778 \uac00\uc0c1\ud658\uacbd\uc744 \ube44\ud65c\uc131\ud654\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3684],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>y});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,i=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=l,y=u["".concat(s,".").concat(c)]||u[c]||h[c]||i;return t?a.createElement(y,r(r({ref:n},v),{},{components:t})):a.createElement(y,r({ref:n},v))}));function y(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var i=t.length,r=new Array(i);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:l,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const i={title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"version-1.0/appendix/pyenv",title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",description:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",source:"@site/versioned_docs/version-1.0/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/docs/1.0/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/pyenv.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/docs/1.0/api-deployment/seldon-children"},next:{title:"2. 
Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/1.0/appendix/metallb"}},s={},p=[{value:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd",level:2},{value:"pyenv \uc124\uce58",id:"pyenv-\uc124\uce58",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"\uc124\uce58 - macOS",id:"\uc124\uce58---macos",level:3},{value:"\uc124\uce58 - Ubuntu",id:"\uc124\uce58---ubuntu",level:3},{value:"pyenv \uc0ac\uc6a9",id:"pyenv-\uc0ac\uc6a9",level:2},{value:"Python \ubc84\uc804 \uc124\uce58",id:"python-\ubc84\uc804-\uc124\uce58",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654",id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,l.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd"},"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),(0,l.kt)("p",null,"Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\ub2e4 \ubcf4\uba74 \uc5ec\ub7ec \ubc84\uc804\uc758 Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\ub098, \uc5ec\ub7ec \ud504\ub85c\uc81d\ud2b8\ubcc4 \ud328\ud0a4\uc9c0 \ubc84\uc804\uc744 \ub530\ub85c \uad00\ub9ac\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\ucc98\ub7fc Python \ud658\uacbd \ud639\uc740 Python Package \ud658\uacbd\uc744 \uac00\uc0c1\ud654\ud558\uc5ec \uad00\ub9ac\ud558\ub294 \uac83\uc744 \uc27d\uac8c \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\ub85c\ub294 pyenv, conda, virtualenv, venv \ub4f1\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774 \uc911 ",(0,l.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv"),"\uc640 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),"\ub97c \uc124\uce58\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","pyenv\ub294 Python \ubc84\uc804\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\uba70, pyenv-virtualenv\ub294 pyenv\uc758 plugin\uc73c\ub85c\uc368 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0 \ud658\uacbd\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc90d\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pyenv-\uc124\uce58"},"pyenv \uc124\uce58"),(0,l.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,l.kt)("p",null,"\uc6b4\uc601 \uccb4\uc81c\ubcc4\ub85c Prerequisites\uac00 \ub2e4\ub985\ub2c8\ub2e4. 
",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"\ub2e4\uc74c \ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc5ec \ud544\uc218 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"\uc124\uce58---macos"},"\uc124\uce58 - macOS"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"macOS\uc758 \uacbd\uc6b0 \uce74\ud0c8\ub9ac\ub098 \ubc84\uc804 \uc774\ud6c4 \uae30\ubcf8 shell\uc774 zsh\ub85c \ubcc0\uacbd\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 zsh\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h3",{id:"\uc124\uce58---ubuntu"},"\uc124\uce58 - Ubuntu"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | 
bash\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc774 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\n\uc911\ub7b5...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"\uae30\ubcf8 shell\ub85c bash shell\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4.\nbash\uc5d0\uc11c pyenv\uc640 pyenv-virtualenv \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \ubb38\uc790\uc5f4\uc744 \uc785\ub825\ud55c \ud6c4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,l.kt)("p",null,"shell\uc744 restart \ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a 
specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h2",{id:"pyenv-\uc0ac\uc6a9"},"pyenv \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"python-\ubc84\uc804-\uc124\uce58"},"Python \ubc84\uc804 \uc124\uce58"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv install ")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc744 \uc124\uce58\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc608\uc2dc\ub85c kubeflow\uc5d0\uc11c \uae30\ubcf8\uc73c\ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc774\uc36c 3.7.12 \ubc84\uc804\uc744 \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131"},"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv <\uac00\uc0c1\ud658\uacbd-\uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc758 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c Python 3.7.12 \ubc84\uc804\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already 
satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9"},"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv activate <\uac00\uc0c1\ud658\uacbd \uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc704\uc640 \uac19\uc740 \ubc29\uc2dd\uc73c\ub85c \uc0dd\uc131\ud55c \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ud604\uc7ac \uac00\uc0c1\ud658\uacbd\uc758 \uc815\ubcf4\uac00 shell\uc758 \ub9e8 \uc55e\uc5d0 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654"},"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"source deactivate")," \uba85\ub839\uc744 \ud1b5\ud574 \ud604\uc7ac \uc0ac\uc6a9 \uc911\uc778 \uac00\uc0c1\ud658\uacbd\uc744 \ube44\ud65c\uc131\ud654\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/c4afae5c.db1f7d62.js b/assets/js/c4afae5c.34e05a5a.js similarity index 97% rename from assets/js/c4afae5c.db1f7d62.js rename to assets/js/c4afae5c.34e05a5a.js index 1e2a1bc1..4c7b4ace 100644 --- a/assets/js/c4afae5c.db1f7d62.js +++ b/assets/js/c4afae5c.34e05a5a.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5825],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return 
n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=a,b=c["".concat(s,".").concat(m)]||c[m]||d[m]||o;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"kubeflow-dashboard-guide/intro",title:"1. Central Dashboard",description:"",source:"@site/docs/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/docs/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/docs/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-kf"},"Kubeflow \uc124\uce58"),"\ub97c \uc644\ub8cc\ud558\uba74, \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(5215).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"Central Dashboard\ub294 Kubeflow\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \ud1b5\ud569\ud558\uc5ec \uc81c\uacf5\ud558\ub294 UI\uc785\ub2c8\ub2e4. 
Central Dashboard\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \uae30\ub2a5\uc740 \ud06c\uac8c \uc67c\ucabd\uc758 \ud0ed\uc744 \uae30\uc900\uc73c\ub85c \uad6c\ubd84\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uae30\ub2a5\ubcc4 \uac04\ub2e8\ud55c \uc0ac\uc6a9\ubc95\uc744 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},5215:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5825],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=a,b=c["".concat(s,".").concat(m)]||c[m]||d[m]||o;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"kubeflow-dashboard-guide/intro",title:"1. 
Central Dashboard",description:"",source:"@site/docs/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/docs/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/docs/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/docs/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-kf"},"Kubeflow \uc124\uce58"),"\ub97c \uc644\ub8cc\ud558\uba74, \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \ud1b5\ud574 \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(5215).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"Central Dashboard\ub294 Kubeflow\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \ubaa8\ub4e0 \uae30\ub2a5\uc744 \ud1b5\ud569\ud558\uc5ec \uc81c\uacf5\ud558\ub294 UI\uc785\ub2c8\ub2e4. Central Dashboard\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 \uae30\ub2a5\uc740 \ud06c\uac8c \uc67c\ucabd\uc758 \ud0ed\uc744 \uae30\uc900\uc73c\ub85c \uad6c\ubd84\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(7511).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uae30\ub2a5\ubcc4 \uac04\ub2e8\ud55c \uc0ac\uc6a9\ubc95\uc744 \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},5215:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},7511:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/c58e39e2.9f5738d0.js b/assets/js/c58e39e2.485ccbff.js similarity index 99% rename from assets/js/c58e39e2.9f5738d0.js rename to assets/js/c58e39e2.485ccbff.js index f0dbd3ea..a9b62233 100644 --- a/assets/js/c58e39e2.9f5738d0.js +++ b/assets/js/c58e39e2.485ccbff.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1804],{3905:(e,t,i)=>{i.d(t,{Zo:()=>c,kt:()=>b});var n=i(7294);function l(e,t,i){return t in 
e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function p(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function a(e){for(var t=1;t=0||(l[i]=e[i]);return l}(e,t);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(l[i]=e[i])}return l}var o=n.createContext({}),s=function(e){var t=n.useContext(o),i=t;return e&&(i="function"==typeof e?e(t):a(a({},t),e)),i},c=function(e){var t=s(e.components);return n.createElement(o.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},g=n.forwardRef((function(e,t){var i=e.components,l=e.mdxType,p=e.originalType,o=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),u=s(i),g=l,b=u["".concat(o,".").concat(g)]||u[g]||d[g]||p;return i?n.createElement(b,a(a({ref:t},c),{},{components:i})):n.createElement(b,a({ref:t},c))}));function b(e,t){var i=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var p=i.length,a=new Array(p);a[0]=g;var r={};for(var o in t)hasOwnProperty.call(t,o)&&(r[o]=t[o]);r.originalType=e,r[u]="string"==typeof e?e:l,a[1]=r;for(var s=2;s{i.r(t),i.d(t,{assets:()=>o,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>r,toc:()=>s});var n=i(7462),l=(i(7294),i(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,r={unversionedId:"kubeflow/basic-pipeline-upload",id:"kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/docs/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/docs/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline-upload.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/docs/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/docs/kubeflow/basic-run"}},o={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Pipelines \ud0ed \uc120\ud0dd",id:"1-pipelines-\ud0ed-\uc120\ud0dd",level:3},{value:"2. Upload Pipeline \uc120\ud0dd",id:"2-upload-pipeline-\uc120\ud0dd",level:3},{value:"3. Choose file \uc120\ud0dd",id:"3-choose-file-\uc120\ud0dd",level:3},{value:"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc",id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc",level:3},{value:"5. 
Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"\uc774\uc81c \uc6b0\ub9ac\uac00 \ub9cc\ub4e0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc9c1\uc811 kubeflow\uc5d0\uc11c \uc5c5\ub85c\ub4dc \ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ub294 kubeflow \ub300\uc2dc\ubcf4\ub4dc UI\ub97c \ud1b5\ud574 \uc9c4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n",(0,l.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-kf#%EC%A0%95%EC%83%81-%EC%84%A4%EC%B9%98-%ED%99%95%EC%9D%B8"},"Install Kubeflow")," \uc5d0\uc11c \uc0ac\uc6a9\ud55c \ubc29\ubc95\uc744 \uc774\uc6a9\ud574 \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080"),"\uc5d0 \uc811\uc18d\ud574 \ub300\uc2dc\ubcf4\ub4dc\ub97c \uc5f4\uc5b4\uc90d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-pipelines-\ud0ed-\uc120\ud0dd"},"1. Pipelines \ud0ed \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:i(8924).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-upload-pipeline-\uc120\ud0dd"},"2. Upload Pipeline \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:i(2893).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-choose-file-\uc120\ud0dd"},"3. Choose file \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:i(3951).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc"},"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:i(6439).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(4181).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc740 \uc5c5\ub85c\ub4dc\ub97c \ud1b5\ud574\uc11c \ubc84\uc804\uc744 \uad00\ub9ac\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub2e4\ub9cc \uae43\ud5d9\uacfc \uac19\uc740 \ucf54\ub4dc \ucc28\uc6d0\uc758 \ubc84\uc804 \uad00\ub9ac\uac00 \uc544\ub2cc \uac19\uc740 \uc774\ub984\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.\n\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud55c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 example_pipeline\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:i(536).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(4181).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Upload Version\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uc218 \uc788\ub294 \ud654\uba74\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:i(7375).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:i(8568).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778 \ubc84\uc804\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:i(6975).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},8924:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},2893:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},3951:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},6439:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},4181:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},536:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},7375:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},8568:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},6975:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1804],{3905:(e,t,i)=>{i.d(t,{Zo:()=>c,kt:()=>b});var n=i(7294);function l(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function p(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function a(e){for(var t=1;t=0||(l[i]=e[i]);return l}(e,t);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(l[i]=e[i])}return l}var o=n.createContext({}),s=function(e){var t=n.useContext(o),i=t;return e&&(i="function"==typeof 
e?e(t):a(a({},t),e)),i},c=function(e){var t=s(e.components);return n.createElement(o.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},g=n.forwardRef((function(e,t){var i=e.components,l=e.mdxType,p=e.originalType,o=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),u=s(i),g=l,b=u["".concat(o,".").concat(g)]||u[g]||d[g]||p;return i?n.createElement(b,a(a({ref:t},c),{},{components:i})):n.createElement(b,a({ref:t},c))}));function b(e,t){var i=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var p=i.length,a=new Array(p);a[0]=g;var r={};for(var o in t)hasOwnProperty.call(t,o)&&(r[o]=t[o]);r.originalType=e,r[u]="string"==typeof e?e:l,a[1]=r;for(var s=2;s{i.r(t),i.d(t,{assets:()=>o,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>r,toc:()=>s});var n=i(7462),l=(i(7294),i(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,r={unversionedId:"kubeflow/basic-pipeline-upload",id:"kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/docs/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/docs/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline-upload.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/docs/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/docs/kubeflow/basic-run"}},o={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Pipelines \ud0ed \uc120\ud0dd",id:"1-pipelines-\ud0ed-\uc120\ud0dd",level:3},{value:"2. Upload Pipeline \uc120\ud0dd",id:"2-upload-pipeline-\uc120\ud0dd",level:3},{value:"3. Choose file \uc120\ud0dd",id:"3-choose-file-\uc120\ud0dd",level:3},{value:"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc",id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc",level:3},{value:"5. 
Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:t,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"\uc774\uc81c \uc6b0\ub9ac\uac00 \ub9cc\ub4e0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc9c1\uc811 kubeflow\uc5d0\uc11c \uc5c5\ub85c\ub4dc \ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ub294 kubeflow \ub300\uc2dc\ubcf4\ub4dc UI\ub97c \ud1b5\ud574 \uc9c4\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n",(0,l.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-kf#%EC%A0%95%EC%83%81-%EC%84%A4%EC%B9%98-%ED%99%95%EC%9D%B8"},"Install Kubeflow")," \uc5d0\uc11c \uc0ac\uc6a9\ud55c \ubc29\ubc95\uc744 \uc774\uc6a9\ud574 \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080"),"\uc5d0 \uc811\uc18d\ud574 \ub300\uc2dc\ubcf4\ub4dc\ub97c \uc5f4\uc5b4\uc90d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"1-pipelines-\ud0ed-\uc120\ud0dd"},"1. Pipelines \ud0ed \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:i(8924).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-upload-pipeline-\uc120\ud0dd"},"2. Upload Pipeline \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:i(2893).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-choose-file-\uc120\ud0dd"},"3. Choose file \uc120\ud0dd"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:i(3951).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-\uc0dd\uc131\ub41c-yaml\ud30c\uc77c-\uc5c5\ub85c\ub4dc"},"4. \uc0dd\uc131\ub41c yaml\ud30c\uc77c \uc5c5\ub85c\ub4dc"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:i(6439).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(4181).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \ud30c\uc774\ud504\ub77c\uc778\uc740 \uc5c5\ub85c\ub4dc\ub97c \ud1b5\ud574\uc11c \ubc84\uc804\uc744 \uad00\ub9ac\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ub2e4\ub9cc \uae43\ud5d9\uacfc \uac19\uc740 \ucf54\ub4dc \ucc28\uc6d0\uc758 \ubc84\uc804 \uad00\ub9ac\uac00 \uc544\ub2cc \uac19\uc740 \uc774\ub984\uc758 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uc5ed\ud560\uc744 \ud569\ub2c8\ub2e4.\n\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud55c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 example_pipeline\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:i(536).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:i(4181).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Upload Version\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc\ud560 \uc218 \uc788\ub294 \ud654\uba74\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:i(7375).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc744 \uc5c5\ub85c\ub4dc \ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:i(8568).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"\uc5c5\ub85c\ub4dc\ub41c \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc774 \ud30c\uc774\ud504\ub77c\uc778 \ubc84\uc804\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:i(6975).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},8924:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},2893:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},3951:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},6439:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},4181:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},536:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},7375:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},8568:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},6975:(e,t,i)=>{i.d(t,{Z:()=>n});const n=i.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file diff --git a/assets/js/c8feb4f8.27abc486.js b/assets/js/c8feb4f8.69143075.js similarity index 99% rename from assets/js/c8feb4f8.27abc486.js rename to assets/js/c8feb4f8.69143075.js index 246d8312..6af8e2ea 100644 --- a/assets/js/c8feb4f8.27abc486.js +++ b/assets/js/c8feb4f8.69143075.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6540],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var 
t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var i=n.createContext({}),u=function(e){var t=n.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,f=c["".concat(i,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,s(s({ref:t},p),{},{components:r})):n.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,s=new Array(a);s[0]=m;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"kubeflow-dashboard-guide/volumes",id:"version-1.0/kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. 
Experiments(AutoML)",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments"}},i={},u=[{value:"Volumes",id:"volumes",level:2},{value:"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30",id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30",level:2}],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"volumes"},"Volumes"),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Volumes\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"volumes",src:r(4144).Z,width:"1386",height:"382"})),(0,o.kt)("p",null,"Volumes \ud0ed\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/volumes/"},"Kubernetes\uc758 \ubcfc\ub968(Volume)"),", \uc815\ud655\ud788\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/persistent-volumes/"},"\ud37c\uc2dc\uc2a4\ud134\ud2b8 \ubcfc\ub968 \ud074\ub808\uc784(Persistent Volume Claim, \uc774\ud558 pvc)")," \uc911 \ud604\uc7ac user\uc758 namespace\uc5d0 \uc18d\ud55c pvc\ub97c \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc704 \uc2a4\ud06c\ub9b0\uc0f7\uc744 \ubcf4\uba74, ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," \ud398\uc774\uc9c0\uc5d0\uc11c \uc0dd\uc131\ud55c Volume\uc758 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 Volume\uc758 Storage Class\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c Default Storage Class\uc778 local-path\ub85c \uc124\uc815\ub418\uc5b4\uc788\uc74c\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc678\uc5d0\ub3c4 user namespace\uc5d0 \uc0c8\ub85c\uc6b4 \ubcfc\ub968\uc744 \uc0dd\uc131\ud558\uac70\ub098, \uc870\ud68c\ud558\uac70\ub098, \uc0ad\uc81c\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0 Volumes \ud398\uc774\uc9c0\ub97c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("hr",null),(0,o.kt)("h2",{id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30"},"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30"),(0,o.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,o.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"new-volume",src:r(5166).Z,width:"1192",height:"934"})),(0,o.kt)("p",null,"name, size, storage class, access mode\ub97c \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6d0\ud558\ub294 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc744 \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcfc\ub968\uc758 Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending"),"\uc73c\ub85c \uc870\ud68c\ub429\ub2c8\ub2e4. 
",(0,o.kt)("inlineCode",{parentName:"p"},"Status")," \uc544\uc774\ucf58\uc5d0 \ub9c8\uc6b0\uc2a4 \ucee4\uc11c\ub97c \uac00\uc838\ub2e4 \ub300\uba74 ",(0,o.kt)("em",{parentName:"p"},"\ud574\ub2f9 \ubcfc\ub968\uc740 mount\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 first consumer\uac00 \ub098\ud0c0\ub0a0 \ub54c \uc2e4\uc81c\ub85c \uc0dd\uc131\uc744 \uc9c4\ud589\ud55c\ub2e4(This volume will be bound when its first consumer is created.)"),"\ub294 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass"),"\uc778 ",(0,o.kt)("inlineCode",{parentName:"p"},"local-path"),"\uc758 \ubcfc\ub968 \uc0dd\uc131 \uc815\ucc45\uc5d0 \ud574\ub2f9\ud558\uba70, ",(0,o.kt)("strong",{parentName:"p"},"\ubb38\uc81c \uc0c1\ud669\uc774 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \ud398\uc774\uc9c0\uc5d0\uc11c Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending")," \uc73c\ub85c \ubcf4\uc774\ub354\ub77c\ub3c4 \ud574\ub2f9 \ubcfc\ub968\uc744 \uc0ac\uc6a9\ud558\uae38 \uc6d0\ud558\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ud639\uc740 \ud30c\ub4dc(Pod)\uc5d0\uc11c\ub294 \ud574\ub2f9 \ubcfc\ub968\uc758 \uc774\ub984\uc744 \uc9c0\uc815\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc73c\uba70, \uadf8\ub54c \uc2e4\uc81c\ub85c \ubcfc\ub968 \uc0dd\uc131\uc774 \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"creating-volume",src:r(9658).Z,width:"1572",height:"450"})))}d.isMDXComponent=!0},9658:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},5166:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},4144:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6540],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function s(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var i=n.createContext({}),u=function(e){var t=n.useContext(i),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),m=o,f=c["".concat(i,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,s(s({ref:t},p),{},{components:r})):n.createElement(f,s({ref:t},p))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,s=new 
Array(a);s[0]=m;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[c]="string"==typeof e?e:o,s[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>d,frontMatter:()=>a,metadata:()=>l,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"kubeflow-dashboard-guide/volumes",id:"version-1.0/kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/docs/1.0/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments"}},i={},u=[{value:"Volumes",id:"volumes",level:2},{value:"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30",id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30",level:2}],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"volumes"},"Volumes"),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Volumes\ub97c \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"volumes",src:r(4144).Z,width:"1386",height:"382"})),(0,o.kt)("p",null,"Volumes \ud0ed\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/volumes/"},"Kubernetes\uc758 \ubcfc\ub968(Volume)"),", \uc815\ud655\ud788\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/persistent-volumes/"},"\ud37c\uc2dc\uc2a4\ud134\ud2b8 \ubcfc\ub968 \ud074\ub808\uc784(Persistent Volume Claim, \uc774\ud558 pvc)")," \uc911 \ud604\uc7ac user\uc758 namespace\uc5d0 \uc18d\ud55c pvc\ub97c \uad00\ub9ac\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc704 \uc2a4\ud06c\ub9b0\uc0f7\uc744 \ubcf4\uba74, ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," \ud398\uc774\uc9c0\uc5d0\uc11c \uc0dd\uc131\ud55c Volume\uc758 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ud574\ub2f9 Volume\uc758 Storage Class\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c Default Storage Class\uc778 local-path\ub85c \uc124\uc815\ub418\uc5b4\uc788\uc74c\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\uc678\uc5d0\ub3c4 user namespace\uc5d0 \uc0c8\ub85c\uc6b4 \ubcfc\ub968\uc744 \uc0dd\uc131\ud558\uac70\ub098, \uc870\ud68c\ud558\uac70\ub098, \uc0ad\uc81c\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uc5d0 Volumes \ud398\uc774\uc9c0\ub97c \ud65c\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("hr",null),(0,o.kt)("h2",{id:"\ubcfc\ub968-\uc0dd\uc131\ud558\uae30"},"\ubcfc\ub968 \uc0dd\uc131\ud558\uae30"),(0,o.kt)("p",null,"\uc624\ub978\ucabd \uc704\uc758 ",(0,o.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," \ubc84\ud2bc\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc744 \ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"new-volume",src:r(5166).Z,width:"1192",height:"934"})),(0,o.kt)("p",null,"name, size, storage class, access mode\ub97c \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc6d0\ud558\ub294 \ub9ac\uc18c\uc2a4 \uc2a4\ud399\uc744 \uc9c0\uc815\ud558\uc5ec \uc0dd\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcfc\ub968\uc758 Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending"),"\uc73c\ub85c \uc870\ud68c\ub429\ub2c8\ub2e4. ",(0,o.kt)("inlineCode",{parentName:"p"},"Status")," \uc544\uc774\ucf58\uc5d0 \ub9c8\uc6b0\uc2a4 \ucee4\uc11c\ub97c \uac00\uc838\ub2e4 \ub300\uba74 ",(0,o.kt)("em",{parentName:"p"},"\ud574\ub2f9 \ubcfc\ub968\uc740 mount\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 first consumer\uac00 \ub098\ud0c0\ub0a0 \ub54c \uc2e4\uc81c\ub85c \uc0dd\uc131\uc744 \uc9c4\ud589\ud55c\ub2e4(This volume will be bound when its first consumer is created.)"),"\ub294 \uba54\uc2dc\uc9c0\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass"),"\uc778 ",(0,o.kt)("inlineCode",{parentName:"p"},"local-path"),"\uc758 \ubcfc\ub968 \uc0dd\uc131 \uc815\ucc45\uc5d0 \ud574\ub2f9\ud558\uba70, ",(0,o.kt)("strong",{parentName:"p"},"\ubb38\uc81c \uc0c1\ud669\uc774 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("br",{parentName:"p"}),"\n","\ud574\ub2f9 \ud398\uc774\uc9c0\uc5d0\uc11c Status\uac00 ",(0,o.kt)("inlineCode",{parentName:"p"},"Pending")," \uc73c\ub85c \ubcf4\uc774\ub354\ub77c\ub3c4 \ud574\ub2f9 \ubcfc\ub968\uc744 \uc0ac\uc6a9\ud558\uae38 \uc6d0\ud558\ub294 \ub178\ud2b8\ubd81 \uc11c\ubc84 \ud639\uc740 \ud30c\ub4dc(Pod)\uc5d0\uc11c\ub294 \ud574\ub2f9 \ubcfc\ub968\uc758 \uc774\ub984\uc744 \uc9c0\uc815\ud558\uc5ec \uc0ac\uc6a9\ud560 \uc218 \uc788\uc73c\uba70, \uadf8\ub54c \uc2e4\uc81c\ub85c \ubcfc\ub968 \uc0dd\uc131\uc774 \uc9c4\ud589\ub429\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"creating-volume",src:r(9658).Z,width:"1572",height:"450"})))}d.isMDXComponent=!0},9658:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},5166:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},4144:(e,t,r)=>{r.d(t,{Z:()=>n});const 
n=r.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file diff --git a/assets/js/cf706b7e.8f1b64d7.js b/assets/js/cf706b7e.bc31be8d.js similarity index 99% rename from assets/js/cf706b7e.8f1b64d7.js rename to assets/js/cf706b7e.bc31be8d.js index 3d12dff1..f6228330 100644 --- a/assets/js/cf706b7e.8f1b64d7.js +++ b/assets/js/cf706b7e.bc31be8d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2779],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>k});var t=n(7294);function l(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a=0||(l[n]=e[n]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},c=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},d=t.forwardRef((function(e,a){var n=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(n),d=l,k=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return n?t.createElement(k,s(s({ref:a},c),{},{components:n})):t.createElement(k,s({ref:a},c))}));function k(e,a){var n=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=n.length,s=new Array(r);s[0]=d;var o={};for(var i in a)hasOwnProperty.call(a,i)&&(o[i]=a[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var t=n(7462),l=(n(7294),n(3905));const r={title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},s=void 0,o={unversionedId:"appendix/metallb",id:"appendix/metallb",title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",description:"MetalLB\ub780?",source:"@site/docs/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/docs/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/metallb.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/appendix/pyenv"},next:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",permalink:"/docs/further-readings/info"}},i={},p=[{value:"MetalLB\ub780?",id:"metallb\ub780",level:2},{value:"\uc694\uad6c\uc0ac\ud56d",id:"\uc694\uad6c\uc0ac\ud56d",level:2},{value:"MetalLB \uc124\uce58",id:"metallb-\uc124\uce58",level:2},{value:"Preparation",id:"preparation",level:3},{value:"\uc124\uce58 - Manifest",id:"\uc124\uce58---manifest",level:3},{value:"1. 
MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4.",id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4",level:4},{value:"2. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"MetalLB \uc0ac\uc6a9",id:"metallb-\uc0ac\uc6a9",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},m="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(m,(0,t.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"metallb\ub780"},"MetalLB\ub780?"),(0,l.kt)("p",null,"Kubernetes \uc0ac\uc6a9 \uc2dc AWS, GCP, Azure \uc640 \uac19\uc740 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc5d0\uc11c\ub294 \uc790\uccb4\uc801\uc73c\ub85c \ub85c\ub4dc \ubca8\ub7f0\uc11c(Load Balancer)\ub97c \uc81c\uacf5\ud574 \uc8fc\uc9c0\ub9cc, \uc628\ud504\ub808\ubbf8\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ubaa8\ub4c8\uc744 \ucd94\uac00\uc801\uc73c\ub85c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB"),"\ub294 \ubca0\uc5b4\uba54\ud0c8 \ud658\uacbd\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc11c\ub97c \uc81c\uacf5\ud558\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub85c\uc81d\ud2b8 \uc785\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"\uc694\uad6c\uc0ac\ud56d"},"\uc694\uad6c\uc0ac\ud56d"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"\uc694\uad6c \uc0ac\ud56d"),(0,l.kt)("th",{parentName:"tr",align:null},"\ubc84\uc804 \ubc0f \ub0b4\uc6a9"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"\ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc774 \uc5c6\ub294 >= v1.13.0")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"\ud638\ud658\uac00\ub2a5\ud55c \ub124\ud2b8\uc6cc\ud06c CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 \uc8fc\uc18c"),(0,l.kt)("td",{parentName:"tr",align:null},"MetalLB \ubc30\ud3ec\uc5d0 \uc0ac\uc6a9")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0"),(0,l.kt)("td",{parentName:"tr",align:null},"BGP \uae30\ub2a5\uc744 \uc9c0\uc6d0\ud558\ub294 \ud558\ub098 \uc774\uc0c1\uc758 \ub77c\uc6b0\ud130")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"\ub178\ub4dc \uac04 \ud3ec\ud2b8 TCP/UDP 7946 \uc624\ud508"),(0,l.kt)("td",{parentName:"tr",align:null},"memberlist \uc694\uad6c \uc0ac\ud56d")))),(0,l.kt)("h2",{id:"metallb-\uc124\uce58"},"MetalLB \uc124\uce58"),(0,l.kt)("h3",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"IPVS \ubaa8\ub4dc\uc5d0\uc11c kube-proxy\ub97c 
\uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0 Kubernetes v1.14.2 \uc774\ud6c4\ubd80\ud130\ub294 \uc5c4\uaca9\ud55c ARP(strictARP) \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud558\ub3c4\ub85d \uc124\uc815\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kube-router\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc5c4\uaca9\ud55c ARP\ub97c \ud65c\uc131\ud654\ud558\ubbc0\ub85c \uc11c\ube44\uc2a4 \ud504\ub85d\uc2dc\ub85c \uc0ac\uc6a9\ud560 \uacbd\uc6b0\uc5d0\ub294 \uc774 \uae30\ub2a5\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc5c4\uaca9\ud55c ARP \ubaa8\ub4dc\ub97c \uc801\uc6a9\ud558\uae30\uc5d0 \uc55e\uc11c, \ud604\uc7ac \ubaa8\ub4dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"strictARP: false \uac00 \ucd9c\ub825\ub418\ub294 \uacbd\uc6b0 \ub2e4\uc74c\uc744 \uc2e4\ud589\ud558\uc5ec strictARP: true\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4.\n(strictARP: true\uac00 \uc774\ubbf8 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \uc218\ud589\ud558\uc9c0 \uc54a\uc73c\uc154\ub3c4 \ub429\ub2c8\ub2e4.)"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"\uc124\uce58---manifest"},"\uc124\uce58 - Manifest"),(0,l.kt)("h4",{id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4"},"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,l.kt)("p",null,"metallb-system namespace \uc758 2 \uac1c\uc758 pod \uc774 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"\ub9e4\ub2c8\ud398\uc2a4\ud2b8\uc758 \uad6c\uc131 \uc694\uc18c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"deployment \ub85c \ubc30\ud3ec\ub418\uba70, \ub85c\ub4dc \ubca8\ub7f0\uc2f1\uc744 \uc218\ud589\ud560 external IP \uc8fc\uc18c\uc758 \ud560\ub2f9\uc744 \ucc98\ub9ac\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"daemonset \ud615\ud0dc\ub85c \ubc30\ud3ec\ub418\uba70, \uc678\ubd80 \ud2b8\ub798\ud53d\uacfc \uc11c\ube44\uc2a4\ub97c \uc5f0\uacb0\ud574 \ub124\ud2b8\uc6cc\ud06c \ud1b5\uc2e0\uc774 \uac00\ub2a5\ud558\ub3c4\ub85d \uad6c\uc131\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4.")))),(0,l.kt)("p",null,"\uc11c\ube44\uc2a4\uc5d0\ub294 \ucee8\ud2b8\ub864\ub7ec \ubc0f \uc2a4\ud53c\ucee4\uc640 \uad6c\uc131 \uc694\uc18c\uac00 \uc791\ub3d9\ud558\ub294 \ub370 \ud544\uc694\ud55c RBAC \uc0ac\uc6a9 \uad8c\ud55c\uc774 \ud3ec\ud568\ub429\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"MetalLB \uc758 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uc815\ucc45 \uc124\uc815\uc740 \uad00\ub828 \uc124\uc815 \uc815\ubcf4\ub97c \ub2f4\uc740 configmap \uc744 \ubc30\ud3ec\ud558\uc5ec \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"MetalLB \uc5d0\uc11c \uad6c\uc131\ud560 \uc218 \uc788\ub294 \ubaa8\ub4dc\ub85c\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 \ubaa8\ub4dc")),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP \ubaa8\ub4dc"))),(0,l.kt)("p",null,"\uc5ec\uae30\uc5d0\uc11c\ub294 Layer 2 \ubaa8\ub4dc\ub85c \uc9c4\ud589\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"Layer 2 \ubaa8\ub4dc\ub294 \uac04\ub2e8\ud558\uac8c \uc0ac\uc6a9\ud560 IP \uc8fc\uc18c\uc758 \ub300\uc5ed\ub9cc \uc124\uc815\ud558\uba74 \ub429\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Layer 2 \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc6cc\ucee4 \ub178\ub4dc\uc758 \ub124\ud2b8\uc6cc\ud06c \uc778\ud130\ud398\uc774\uc2a4\uc5d0 IP\ub97c \ubc14\uc778\ub529 \ud558\uc9c0 \uc54a\uc544\ub3c4 \ub418\ub294\ub370 \ub85c\uceec \ub124\ud2b8\uc6cc\ud06c\uc758 ARP \uc694\uccad\uc5d0 \uc9c1\uc811 \uc751\ub2f5\ud558\uc5ec \ucef4\ud4e8\ud130\uc758 MAC\uc8fc\uc18c\ub97c \ud074\ub77c\uc774\uc5b8\ud2b8\uc5d0 \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc73c\ub85c \uc791\ub3d9\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub2e4\uc74c ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," \ud30c\uc77c\uc740 MetalLB \uac00 192.168.35.100 ~ 192.168.35.110\uc758 IP\uc5d0 \ub300\ud55c \uc81c\uc5b4 \uad8c\ud55c\uc744 \uc81c\uacf5\ud558\uace0 Layer 2 \ubaa8\ub4dc\ub97c \uad6c\uc131\ud558\ub294 \uc124\uc815\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, 192.168.35.100 ~ 192.168.35.110 \ub300\uc5ed\uc774 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc640 \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc \ubaa8\ub450 \uc811\uadfc \uac00\ub2a5\ud55c \ub300\uc5ed\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uc815\uc744 \uc801\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"metallb-\uc0ac\uc6a9"},"MetalLB \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 kubeflow\uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 istio-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 istio-ingressgateway \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer"),"\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n 
clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100")," \uc778 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:n(5568).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 minio \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 kubeflow \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 minio-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n 
kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:n(369).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 mlflow \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 mlflow-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 mlflow-server-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n 
mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:n(7827).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 Grafana \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 seldon-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 seldon-core-analytics-grafana \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 
\ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:n(2043).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},2043:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},5568:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},369:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},7827:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2779],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>k});var t=n(7294);function l(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a=0||(l[n]=e[n]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var 
i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},c=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},d=t.forwardRef((function(e,a){var n=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(n),d=l,k=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return n?t.createElement(k,s(s({ref:a},c),{},{components:n})):t.createElement(k,s({ref:a},c))}));function k(e,a){var n=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=n.length,s=new Array(r);s[0]=d;var o={};for(var i in a)hasOwnProperty.call(a,i)&&(o[i]=a[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var t=n(7462),l=(n(7294),n(3905));const r={title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},s=void 0,o={unversionedId:"appendix/metallb",id:"appendix/metallb",title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",description:"MetalLB\ub780?",source:"@site/docs/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/docs/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/metallb.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/appendix/pyenv"},next:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",permalink:"/docs/further-readings/info"}},i={},p=[{value:"MetalLB\ub780?",id:"metallb\ub780",level:2},{value:"\uc694\uad6c\uc0ac\ud56d",id:"\uc694\uad6c\uc0ac\ud56d",level:2},{value:"MetalLB \uc124\uce58",id:"metallb-\uc124\uce58",level:2},{value:"Preparation",id:"preparation",level:3},{value:"\uc124\uce58 - Manifest",id:"\uc124\uce58---manifest",level:3},{value:"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4.",id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4",level:4},{value:"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"MetalLB \uc0ac\uc6a9",id:"metallb-\uc0ac\uc6a9",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},m="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(m,(0,t.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"metallb\ub780"},"MetalLB\ub780?"),(0,l.kt)("p",null,"Kubernetes \uc0ac\uc6a9 \uc2dc AWS, GCP, Azure \uc640 \uac19\uc740 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc5d0\uc11c\ub294 \uc790\uccb4\uc801\uc73c\ub85c \ub85c\ub4dc \ubca8\ub7f0\uc11c(Load Balancer)\ub97c \uc81c\uacf5\ud574 \uc8fc\uc9c0\ub9cc, \uc628\ud504\ub808\ubbf8\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ubaa8\ub4c8\uc744 \ucd94\uac00\uc801\uc73c\ub85c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB"),"\ub294 \ubca0\uc5b4\uba54\ud0c8 \ud658\uacbd\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc11c\ub97c \uc81c\uacf5\ud558\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub85c\uc81d\ud2b8 \uc785\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"\uc694\uad6c\uc0ac\ud56d"},"\uc694\uad6c\uc0ac\ud56d"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"\uc694\uad6c \uc0ac\ud56d"),(0,l.kt)("th",{parentName:"tr",align:null},"\ubc84\uc804 \ubc0f \ub0b4\uc6a9"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"\ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc774 \uc5c6\ub294 >= v1.13.0")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"\ud638\ud658\uac00\ub2a5\ud55c \ub124\ud2b8\uc6cc\ud06c CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 \uc8fc\uc18c"),(0,l.kt)("td",{parentName:"tr",align:null},"MetalLB \ubc30\ud3ec\uc5d0 \uc0ac\uc6a9")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0"),(0,l.kt)("td",{parentName:"tr",align:null},"BGP \uae30\ub2a5\uc744 \uc9c0\uc6d0\ud558\ub294 \ud558\ub098 \uc774\uc0c1\uc758 \ub77c\uc6b0\ud130")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"\ub178\ub4dc \uac04 \ud3ec\ud2b8 TCP/UDP 7946 \uc624\ud508"),(0,l.kt)("td",{parentName:"tr",align:null},"memberlist \uc694\uad6c \uc0ac\ud56d")))),(0,l.kt)("h2",{id:"metallb-\uc124\uce58"},"MetalLB \uc124\uce58"),(0,l.kt)("h3",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"IPVS \ubaa8\ub4dc\uc5d0\uc11c kube-proxy\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0 Kubernetes v1.14.2 \uc774\ud6c4\ubd80\ud130\ub294 \uc5c4\uaca9\ud55c ARP(strictARP) 
\ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud558\ub3c4\ub85d \uc124\uc815\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kube-router\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc5c4\uaca9\ud55c ARP\ub97c \ud65c\uc131\ud654\ud558\ubbc0\ub85c \uc11c\ube44\uc2a4 \ud504\ub85d\uc2dc\ub85c \uc0ac\uc6a9\ud560 \uacbd\uc6b0\uc5d0\ub294 \uc774 \uae30\ub2a5\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc5c4\uaca9\ud55c ARP \ubaa8\ub4dc\ub97c \uc801\uc6a9\ud558\uae30\uc5d0 \uc55e\uc11c, \ud604\uc7ac \ubaa8\ub4dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"strictARP: false \uac00 \ucd9c\ub825\ub418\ub294 \uacbd\uc6b0 \ub2e4\uc74c\uc744 \uc2e4\ud589\ud558\uc5ec strictARP: true\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4.\n(strictARP: true\uac00 \uc774\ubbf8 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \uc218\ud589\ud558\uc9c0 \uc54a\uc73c\uc154\ub3c4 \ub429\ub2c8\ub2e4.)"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"\uc124\uce58---manifest"},"\uc124\uce58 - Manifest"),(0,l.kt)("h4",{id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4"},"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,l.kt)("p",null,"metallb-system namespace \uc758 2 \uac1c\uc758 pod \uc774 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"\ub9e4\ub2c8\ud398\uc2a4\ud2b8\uc758 \uad6c\uc131 \uc694\uc18c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"deployment \ub85c \ubc30\ud3ec\ub418\uba70, \ub85c\ub4dc \ubca8\ub7f0\uc2f1\uc744 \uc218\ud589\ud560 external IP \uc8fc\uc18c\uc758 \ud560\ub2f9\uc744 \ucc98\ub9ac\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"daemonset \ud615\ud0dc\ub85c \ubc30\ud3ec\ub418\uba70, \uc678\ubd80 \ud2b8\ub798\ud53d\uacfc \uc11c\ube44\uc2a4\ub97c \uc5f0\uacb0\ud574 \ub124\ud2b8\uc6cc\ud06c \ud1b5\uc2e0\uc774 \uac00\ub2a5\ud558\ub3c4\ub85d \uad6c\uc131\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4.")))),(0,l.kt)("p",null,"\uc11c\ube44\uc2a4\uc5d0\ub294 \ucee8\ud2b8\ub864\ub7ec \ubc0f \uc2a4\ud53c\ucee4\uc640 \uad6c\uc131 \uc694\uc18c\uac00 \uc791\ub3d9\ud558\ub294 \ub370 \ud544\uc694\ud55c RBAC \uc0ac\uc6a9 \uad8c\ud55c\uc774 \ud3ec\ud568\ub429\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"MetalLB \uc758 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uc815\ucc45 \uc124\uc815\uc740 \uad00\ub828 \uc124\uc815 \uc815\ubcf4\ub97c \ub2f4\uc740 configmap \uc744 \ubc30\ud3ec\ud558\uc5ec \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"MetalLB \uc5d0\uc11c \uad6c\uc131\ud560 \uc218 \uc788\ub294 \ubaa8\ub4dc\ub85c\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 \ubaa8\ub4dc")),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP \ubaa8\ub4dc"))),(0,l.kt)("p",null,"\uc5ec\uae30\uc5d0\uc11c\ub294 Layer 2 \ubaa8\ub4dc\ub85c \uc9c4\ud589\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"Layer 2 \ubaa8\ub4dc\ub294 \uac04\ub2e8\ud558\uac8c \uc0ac\uc6a9\ud560 IP \uc8fc\uc18c\uc758 \ub300\uc5ed\ub9cc \uc124\uc815\ud558\uba74 \ub429\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Layer 2 \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc6cc\ucee4 \ub178\ub4dc\uc758 \ub124\ud2b8\uc6cc\ud06c \uc778\ud130\ud398\uc774\uc2a4\uc5d0 IP\ub97c \ubc14\uc778\ub529 \ud558\uc9c0 \uc54a\uc544\ub3c4 \ub418\ub294\ub370 \ub85c\uceec \ub124\ud2b8\uc6cc\ud06c\uc758 ARP \uc694\uccad\uc5d0 \uc9c1\uc811 \uc751\ub2f5\ud558\uc5ec \ucef4\ud4e8\ud130\uc758 MAC\uc8fc\uc18c\ub97c \ud074\ub77c\uc774\uc5b8\ud2b8\uc5d0 \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc73c\ub85c \uc791\ub3d9\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub2e4\uc74c ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," \ud30c\uc77c\uc740 MetalLB \uac00 192.168.35.100 ~ 192.168.35.110\uc758 IP\uc5d0 \ub300\ud55c \uc81c\uc5b4 \uad8c\ud55c\uc744 \uc81c\uacf5\ud558\uace0 Layer 2 \ubaa8\ub4dc\ub97c \uad6c\uc131\ud558\ub294 \uc124\uc815\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, 192.168.35.100 ~ 192.168.35.110 \ub300\uc5ed\uc774 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc640 \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc \ubaa8\ub450 \uc811\uadfc \uac00\ub2a5\ud55c \ub300\uc5ed\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uc815\uc744 \uc801\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"metallb-\uc0ac\uc6a9"},"MetalLB \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 kubeflow\uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 istio-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 istio-ingressgateway \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer"),"\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n 
clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100")," \uc778 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:n(5568).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 minio \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 kubeflow \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 minio-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n 
kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:n(369).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 mlflow \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 mlflow-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 mlflow-server-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n 
mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:n(7827).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 Grafana \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 seldon-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 seldon-core-analytics-grafana \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 
\ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:n(2043).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},2043:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},5568:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},369:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},7827:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file diff --git a/assets/js/d0b69af8.0e280640.js b/assets/js/d0b69af8.2f8d078b.js similarity index 99% rename from assets/js/d0b69af8.0e280640.js rename to assets/js/d0b69af8.2f8d078b.js index 04c5a14a..79653f6c 100644 --- a/assets/js/d0b69af8.0e280640.js +++ b/assets/js/d0b69af8.2f8d078b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2429],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>h});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=a.createContext({}),i=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=i(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=i(n),m=r,h=d["".concat(p,".").concat(m)]||d[m]||u[m]||o;return n?a.createElement(h,l(l({ref:t},c),{},{components:n})):a.createElement(h,l({ref:t},c))}));function h(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[d]="string"==typeof e?e:r,l[1]=s;for(var i=2;i{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>i});var a=n(7462),r=(n(7294),n(3905));const o={title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"version-1.0/setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/versioned_docs/version-1.0/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/docs/1.0/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/docs/1.0/setup-components/install-components-seldon"},next:{title:"1. 
Central Dashboard",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro"}},p={},i=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3},{value:"References",id:"references",level:2}],c={toc:i},d="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(d,(0,a.Z)({},c,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4(Prometheus) \uc640 \uadf8\ub77c\ud30c\ub098(Grafana) \ub294 \ubaa8\ub2c8\ud130\ub9c1\uc744 \uc704\ud55c \ub3c4\uad6c\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc548\uc815\uc801\uc778 \uc11c\ube44\uc2a4 \uc6b4\uc601\uc744 \uc704\ud574\uc11c\ub294 \uc11c\ube44\uc2a4\uc640 \uc11c\ube44\uc2a4\uac00 \uc6b4\uc601\ub418\uace0 \uc788\ub294 \uc778\ud504\ub77c\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ucc30\ud558\uace0, \uad00\ucc30\ud55c \uba54\ud2b8\ub9ad\uc744 \ubc14\ud0d5\uc73c\ub85c \ubb38\uc81c\uac00 \uc0dd\uae38 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ubaa8\ub2c8\ud130\ub9c1\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud558\uae30 \uc704\ud55c \ub9ce\uc740 \ub3c4\uad6c \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc624\ud508\uc18c\uc2a4\uc778 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus \uacf5\uc2dd \ubb38\uc11c"),", ",(0,r.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\ub294 \ub2e4\uc591\ud55c \ub300\uc0c1\uc73c\ub85c\ubd80\ud130 Metric\uc744 \uc218\uc9d1\ud558\ub294 \ub3c4\uad6c\uc774\uba70, \uadf8\ub77c\ud30c\ub098\ub294 \ubaa8\uc778 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. 
\uc11c\ub85c \uac04\uc758 \uc885\uc18d\uc131\uc740 \uc5c6\uc9c0\ub9cc \uc0c1\ud638 \ubcf4\uc644\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc5b4 \ud568\uaed8 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud55c \ub4a4, Seldon-Core \ub85c \uc0dd\uc131\ud55c SeldonDeployment \ub85c API \uc694\uccad\uc744 \ubcf4\ub0b4, \uc815\uc0c1\uc801\uc73c\ub85c Metrics \uc774 \uc218\uc9d1\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ubcf8 \uae00\uc5d0\uc11c\ub294 seldonio/seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud558\uace0, Seldon-Core \uc5d0\uc11c \uc0dd\uc131\ud55c SeldonDeployment\uc758 Metrics \uc744 \ud6a8\uc728\uc801\uc73c\ub85c \ud655\uc778\ud558\uae30 \uc704\ud55c \ub300\uc2dc\ubcf4\ub4dc\ub3c4 \ud568\uaed8 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,r.kt)("p",null,"seldon-system namespace \uc5d0 6\uac1c\uc758 seldon-core-analytics \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uadf8\ub77c\ud30c\ub098\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-install",src:n(7796).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d\uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Email or username : ",(0,r.kt)("inlineCode",{parentName:"li"},"admin")),(0,r.kt)("li",{parentName:"ul"},"Password : ",(0,r.kt)("inlineCode",{parentName:"li"},"password"))),(0,r.kt)("p",null,"\ub85c\uadf8\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-login",src:n(7397).Z,width:"3640",height:"2140"})),(0,r.kt)("p",null,"\uc88c\uce21\uc758 \ub300\uc2dc\ubcf4\ub4dc \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Manage")," 
\ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard-click",src:n(426).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\uae30\ubcf8\uc801\uc778 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\uac00 \ud3ec\ud568\ub418\uc5b4\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard",src:n(6274).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"Seldon Core API Dashboard \uac00 \ubcf4\uc774\uace0, \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-dashboard",src:n(1095).Z,width:"5016",height:"2826"})),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},426:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},6274:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},7796:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7397:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},1095:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2429],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>h});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var p=a.createContext({}),i=function(e){var t=a.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=i(e.components);return a.createElement(p.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,p=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=i(n),m=r,h=d["".concat(p,".").concat(m)]||d[m]||u[m]||o;return n?a.createElement(h,l(l({ref:t},c),{},{components:n})):a.createElement(h,l({ref:t},c))}));function h(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s[d]="string"==typeof e?e:r,l[1]=s;for(var i=2;i{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>i});var a=n(7462),r=(n(7294),n(3905));const o={title:"4. 
Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"version-1.0/setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/versioned_docs/version-1.0/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/docs/1.0/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/docs/1.0/setup-components/install-components-seldon"},next:{title:"1. Central Dashboard",permalink:"/docs/1.0/kubeflow-dashboard-guide/intro"}},p={},i=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Helm Repository \ucd94\uac00",id:"helm-repository-\ucd94\uac00",level:3},{value:"Helm Repository \uc5c5\ub370\uc774\ud2b8",id:"helm-repository-\uc5c5\ub370\uc774\ud2b8",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:3},{value:"References",id:"references",level:2}],c={toc:i},d="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(d,(0,a.Z)({},c,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4(Prometheus) \uc640 \uadf8\ub77c\ud30c\ub098(Grafana) \ub294 \ubaa8\ub2c8\ud130\ub9c1\uc744 \uc704\ud55c \ub3c4\uad6c\uc785\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc548\uc815\uc801\uc778 \uc11c\ube44\uc2a4 \uc6b4\uc601\uc744 \uc704\ud574\uc11c\ub294 \uc11c\ube44\uc2a4\uc640 \uc11c\ube44\uc2a4\uac00 \uc6b4\uc601\ub418\uace0 \uc788\ub294 \uc778\ud504\ub77c\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ucc30\ud558\uace0, \uad00\ucc30\ud55c \uba54\ud2b8\ub9ad\uc744 \ubc14\ud0d5\uc73c\ub85c \ubb38\uc81c\uac00 \uc0dd\uae38 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ubaa8\ub2c8\ud130\ub9c1\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud558\uae30 \uc704\ud55c \ub9ce\uc740 \ub3c4\uad6c \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc624\ud508\uc18c\uc2a4\uc778 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc0ac\uc6a9\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub354 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 ",(0,r.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus \uacf5\uc2dd \ubb38\uc11c"),", ",(0,r.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana \uacf5\uc2dd \ubb38\uc11c"),"\ub97c \ud655\uc778\ud574\uc8fc\uc2dc\uae30\ub97c 
\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\ub294 \ub2e4\uc591\ud55c \ub300\uc0c1\uc73c\ub85c\ubd80\ud130 Metric\uc744 \uc218\uc9d1\ud558\ub294 \ub3c4\uad6c\uc774\uba70, \uadf8\ub77c\ud30c\ub098\ub294 \ubaa8\uc778 \ub370\uc774\ud130\ub97c \uc2dc\uac01\ud654\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc11c\ub85c \uac04\uc758 \uc885\uc18d\uc131\uc740 \uc5c6\uc9c0\ub9cc \uc0c1\ud638 \ubcf4\uc644\uc801\uc73c\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc5b4 \ud568\uaed8 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\uac00 \ub9ce\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud55c \ub4a4, Seldon-Core \ub85c \uc0dd\uc131\ud55c SeldonDeployment \ub85c API \uc694\uccad\uc744 \ubcf4\ub0b4, \uc815\uc0c1\uc801\uc73c\ub85c Metrics \uc774 \uc218\uc9d1\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ubcf8 \uae00\uc5d0\uc11c\ub294 seldonio/seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\ub97c \uc124\uce58\ud558\uace0, Seldon-Core \uc5d0\uc11c \uc0dd\uc131\ud55c SeldonDeployment\uc758 Metrics \uc744 \ud6a8\uc728\uc801\uc73c\ub85c \ud655\uc778\ud558\uae30 \uc704\ud55c \ub300\uc2dc\ubcf4\ub4dc\ub3c4 \ud568\uaed8 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"helm-repository-\ucd94\uac00"},"Helm Repository \ucd94\uac00"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \ucd94\uac00\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,r.kt)("h3",{id:"helm-repository-\uc5c5\ub370\uc774\ud2b8"},"Helm Repository \uc5c5\ub370\uc774\ud2b8"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc5c5\ub370\uc774\ud2b8\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,r.kt)("h3",{id:"helm-install"},"Helm Install"),(0,r.kt)("p",null,"seldon-core-analytics Helm Chart 1.12.0 \ubc84\uc804\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,r.kt)("p",null,"seldon-system namespace \uc5d0 6\uac1c\uc758 seldon-core-analytics \uad00\ub828 pod \uac00 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,r.kt)("h3",{id:"\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \uadf8\ub77c\ud30c\ub098\uc5d0 \uc815\uc0c1\uc801\uc73c\ub85c \uc811\uc18d\ub418\ub294\uc9c0 \ud655\uc778\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc6b0\uc120 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc5d0\uc11c \uc811\uc18d\ud558\uae30 \uc704\ud574, \ud3ec\ud2b8\ud3ec\uc6cc\ub529\uc744 \uc218\ud589\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 ",(0,r.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),"\uc73c\ub85c \uc811\uc18d\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-install",src:n(7796).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uc811\uc18d\uc815\ubcf4\ub97c \uc785\ub825\ud558\uc5ec \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Email or username : ",(0,r.kt)("inlineCode",{parentName:"li"},"admin")),(0,r.kt)("li",{parentName:"ul"},"Password : ",(0,r.kt)("inlineCode",{parentName:"li"},"password"))),(0,r.kt)("p",null,"\ub85c\uadf8\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"grafana-login",src:n(7397).Z,width:"3640",height:"2140"})),(0,r.kt)("p",null,"\uc88c\uce21\uc758 \ub300\uc2dc\ubcf4\ub4dc \uc544\uc774\ucf58\uc744 \ud074\ub9ad\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"Manage")," 
\ubc84\ud2bc\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard-click",src:n(426).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"\uae30\ubcf8\uc801\uc778 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\uac00 \ud3ec\ud568\ub418\uc5b4\uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \uc911 ",(0,r.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"dashboard",src:n(6274).Z,width:"5016",height:"2826"})),(0,r.kt)("p",null,"Seldon Core API Dashboard \uac00 \ubcf4\uc774\uace0, \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"seldon-dashboard",src:n(1095).Z,width:"5016",height:"2826"})),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},426:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},6274:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},7796:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7397:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},1095:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file diff --git a/assets/js/d3303310.0bd8ebb4.js b/assets/js/d3303310.65ad5c36.js similarity index 99% rename from assets/js/d3303310.0bd8ebb4.js rename to assets/js/d3303310.65ad5c36.js index 21ab2eb0..2c3c1868 100644 --- a/assets/js/d3303310.0bd8ebb4.js +++ b/assets/js/d3303310.65ad5c36.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7346],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},u=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=p(t),m=r,k=d["".concat(s,".").concat(m)]||d[m]||c[m]||i;return t?a.createElement(k,o(o({ref:n},u),{},{components:t})):a.createElement(k,o({ref:n},u))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=m;var l={};for(var s in 
n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var a=t(7462),r=(t(7294),t(3905));const i={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,l={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"version-1.0/setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/docs/1.0/setup-components/install-components-kf"}},s={},p=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. NVIDIA-Docker \uc124\uce58",id:"2-nvidia-docker-\uc124\uce58",level:2},{value:"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815",id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],u={toc:p},d="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,a.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc0f Kubeflow \ub4f1\uc5d0\uc11c GP \ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi")," \uc218\ud589 \uc2dc \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub41c\ub2e4\uba74 \uc774 \ub2e8\uacc4\ub294 \uc0dd\ub7b5\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),"\uc758 \ucd9c\ub825 \uacb0\uacfc\uac00 \uc704\uc640 \uac19\uc9c0 \uc54a\ub2e4\uba74 \uc7a5\ucc29\ub41c GPU\uc5d0 \ub9de\ub294 nvidia driver\ub97c \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d nvidia driver\uc758 \uc124\uce58\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\ub2e4\uba74 \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,r.kt)("h2",{id:"2-nvidia-docker-\uc124\uce58"},"2. NVIDIA-Docker \uc124\uce58"),(0,r.kt)("p",null,"NVIDIA-Docker\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uae30 \uc704\ud574, GPU\ub97c \uc0ac\uc6a9\ud558\ub294 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574\ubd05\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("h2",{id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815"},"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker-CE\ub97c Default Container Runtime\uc73c\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\ub530\ub77c\uc11c, Docker Container \ub0b4\uc5d0\uc11c NVIDIA GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NVIDIA-Docker \ub97c Container Runtime \uc73c\ub85c \uc0ac\uc6a9\ud558\uc5ec pod\ub97c \uc0dd\uc131\ud560 \uc218 \uc788\ub3c4\ub85d Default Runtime\uc744 \uc218\uc815\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," \ud30c\uc77c\uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud30c\uc77c\uc774 \ubcc0\uacbd\ub41c \uac83\uc744 \ud655\uc778\ud55c \ud6c4, Docker\ub97c \uc7ac\uc2dc\uc791\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubcc0\uacbd \uc0ac\ud56d\uc774 \ubc18\uc601\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,r.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin daemonset\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin pod\uc774 RUNNING \uc0c1\ud0dc\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"node \uc815\ubcf4\uc5d0 gpu\uac00 \uc0ac\uc6a9\uac00\ub2a5\ud558\ub3c4\ub85d \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","(",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c \ud074\ub7ec\uc2a4\ud130\ub294 2\uac1c\uc758 GPU\uac00 \uc788\uc5b4\uc11c 2\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4.\n\ubcf8\uc778\uc758 \ud074\ub7ec\uc2a4\ud130\uc758 GPU \uac1c\uc218\uc640 \ub9de\ub294 \uc22b\uc790\uac00 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub429\ub2c8\ub2e4.)"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")))),(0,r.kt)("p",null,"\uc124\uc815\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0, GPU\uc758 value\uac00 ",(0,r.kt)("inlineCode",{parentName:"p"},"")," \uc73c\ub85c \ud45c\uc2dc\ub429\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7346],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>k});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},u=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var 
t=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),d=p(t),m=r,k=d["".concat(s,".").concat(m)]||d[m]||c[m]||i;return t?a.createElement(k,o(o({ref:n},u),{},{components:t})):a.createElement(k,o({ref:n},u))}));function k(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[d]="string"==typeof e?e:r,o[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var a=t(7462),r=(t(7294),t(3905));const i={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,l={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"version-1.0/setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/docs/1.0/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/docs/1.0/setup-components/install-components-kf"}},s={},p=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. NVIDIA-Docker \uc124\uce58",id:"2-nvidia-docker-\uc124\uce58",level:2},{value:"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815",id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],u={toc:p},d="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,a.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc0f Kubeflow \ub4f1\uc5d0\uc11c GP \ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi")," \uc218\ud589 \uc2dc \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub41c\ub2e4\uba74 \uc774 \ub2e8\uacc4\ub294 \uc0dd\ub7b5\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. 
ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),"\uc758 \ucd9c\ub825 \uacb0\uacfc\uac00 \uc704\uc640 \uac19\uc9c0 \uc54a\ub2e4\uba74 \uc7a5\ucc29\ub41c GPU\uc5d0 \ub9de\ub294 nvidia driver\ub97c \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ub9cc\uc57d nvidia driver\uc758 \uc124\uce58\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\ub2e4\uba74 \uc544\ub798 \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \uc124\uce58\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,r.kt)("h2",{id:"2-nvidia-docker-\uc124\uce58"},"2. NVIDIA-Docker \uc124\uce58"),(0,r.kt)("p",null,"NVIDIA-Docker\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud558\uae30 \uc704\ud574, GPU\ub97c \uc0ac\uc6a9\ud558\ub294 \ub3c4\ucee4 \ucee8\ud14c\uc774\ub108\ub97c \uc2e4\ud589\ud574\ubd05\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. 
|\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,r.kt)("h2",{id:"3-nvidia-docker\ub97c-default-container-runtime\uc73c\ub85c-\uc124\uc815"},"3. NVIDIA-Docker\ub97c Default Container Runtime\uc73c\ub85c \uc124\uc815"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \uae30\ubcf8\uc801\uc73c\ub85c Docker-CE\ub97c Default Container Runtime\uc73c\ub85c \uc0ac\uc6a9\ud569\ub2c8\ub2e4.\n\ub530\ub77c\uc11c, Docker Container \ub0b4\uc5d0\uc11c NVIDIA GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 NVIDIA-Docker \ub97c Container Runtime \uc73c\ub85c \uc0ac\uc6a9\ud558\uc5ec pod\ub97c \uc0dd\uc131\ud560 \uc218 \uc788\ub3c4\ub85d Default Runtime\uc744 \uc218\uc815\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," \ud30c\uc77c\uc744 \uc5f4\uc5b4 \ub2e4\uc74c\uacfc \uac19\uc774 \uc218\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud30c\uc77c\uc774 \ubcc0\uacbd\ub41c \uac83\uc744 \ud655\uc778\ud55c \ud6c4, Docker\ub97c \uc7ac\uc2dc\uc791\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubcc0\uacbd \uc0ac\ud56d\uc774 \ubc18\uc601\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,r.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin daemonset\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"nvidia-device-plugin pod\uc774 RUNNING \uc0c1\ud0dc\ub85c \uc0dd\uc131\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\uac00 \ucd9c\ub825\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"node \uc815\ubcf4\uc5d0 gpu\uac00 \uc0ac\uc6a9\uac00\ub2a5\ud558\ub3c4\ub85d \uc124\uc815\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","(",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc9c4\ud589\ud55c \ud074\ub7ec\uc2a4\ud130\ub294 2\uac1c\uc758 GPU\uac00 \uc788\uc5b4\uc11c 2\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4.\n\ubcf8\uc778\uc758 \ud074\ub7ec\uc2a4\ud130\uc758 GPU \uac1c\uc218\uc640 \ub9de\ub294 \uc22b\uc790\uac00 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub429\ub2c8\ub2e4.)"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")))),(0,r.kt)("p",null,"\uc124\uc815\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0, GPU\uc758 value\uac00 ",(0,r.kt)("inlineCode",{parentName:"p"},"")," \uc73c\ub85c \ud45c\uc2dc\ub429\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d5c893db.4c4cecc1.js b/assets/js/d5c893db.f9db721e.js similarity index 97% rename from assets/js/d5c893db.4c4cecc1.js rename to assets/js/d5c893db.f9db721e.js index 725ed0c2..997686c7 100644 --- a/assets/js/d5c893db.4c4cecc1.js +++ b/assets/js/d5c893db.f9db721e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3225],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>d});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof 
e?e(t):l(l({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,c=e.parentName,u=a(e,["components","mdxType","originalType","parentName"]),s=p(r),b=o,d=s["".concat(c,".").concat(b)]||s[b]||f[b]||i;return r?n.createElement(d,l(l({ref:t},u),{},{components:r})):n.createElement(d,l({ref:t},u))}));function d(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,l=new Array(i);l[0]=b;var a={};for(var c in t)hasOwnProperty.call(t,c)&&(a[c]=t[c]);a.originalType=e,a[s]="string"==typeof e?e:o,l[1]=a;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>f,frontMatter:()=>i,metadata:()=>a,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},l=void 0,a={unversionedId:"kubeflow/kubeflow-intro",id:"kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/docs/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/docs/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline \uad00\ub828",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/docs/kubeflow/kubeflow-concepts"}},c={},p=[],u={toc:p},s="wrapper";function f(e){let{components:t,...r}=e;return(0,o.kt)(s,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Kubeflow\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8(Component)\uc640 \ud30c\uc774\ud504\ub77c\uc778(Pipeline)\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline \uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uacfc\ub294 \ub2e4\uc18c \ucc28\uc774\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc5ec\uae30\uc5d0\uc11c\ub294 Kubeflow Pipeline\uc744 \uc6cc\ud06c\ud50c\ub85c(Workflow)\uac00 \uc544\ub2cc \uc55e\uc11c \uc124\uba85\ud55c ",(0,o.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"MLOps\ub97c \uad6c\uc131\ud558\ub294 \uc694\uc18c")," \uc911 \ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubb34\uc5c7\uc774\uba70 \uc5b4\ub5bb\uac8c \uc791\uc131\ud560 \uc218 \uc788\ub294\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}f.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3225],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>d});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=n.createContext({}),p=function(e){var t=n.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(c.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,i=e.originalType,c=e.parentName,u=a(e,["components","mdxType","originalType","parentName"]),s=p(r),b=o,d=s["".concat(c,".").concat(b)]||s[b]||f[b]||i;return r?n.createElement(d,l(l({ref:t},u),{},{components:r})):n.createElement(d,l({ref:t},u))}));function d(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=r.length,l=new Array(i);l[0]=b;var a={};for(var c in t)hasOwnProperty.call(t,c)&&(a[c]=t[c]);a.originalType=e,a[s]="string"==typeof e?e:o,l[1]=a;for(var p=2;p{r.r(t),r.d(t,{assets:()=>c,contentTitle:()=>l,default:()=>f,frontMatter:()=>i,metadata:()=>a,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},l=void 0,a={unversionedId:"kubeflow/kubeflow-intro",id:"kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/docs/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/docs/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline \uad00\ub828",permalink:"/docs/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. 
Kubeflow Concepts",permalink:"/docs/kubeflow/kubeflow-concepts"}},c={},p=[],u={toc:p},s="wrapper";function f(e){let{components:t,...r}=e;return(0,o.kt)(s,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Kubeflow\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8(Component)\uc640 \ud30c\uc774\ud504\ub77c\uc778(Pipeline)\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline \uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uacfc\ub294 \ub2e4\uc18c \ucc28\uc774\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc5ec\uae30\uc5d0\uc11c\ub294 Kubeflow Pipeline\uc744 \uc6cc\ud06c\ud50c\ub85c(Workflow)\uac00 \uc544\ub2cc \uc55e\uc11c \uc124\uba85\ud55c ",(0,o.kt)("a",{parentName:"p",href:"/docs/kubeflow/kubeflow-concepts#component-contents"},"MLOps\ub97c \uad6c\uc131\ud558\ub294 \uc694\uc18c")," \uc911 \ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubb34\uc5c7\uc774\uba70 \uc5b4\ub5bb\uac8c \uc791\uc131\ud560 \uc218 \uc788\ub294\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d6a8d944.4e2a1683.js b/assets/js/d6a8d944.69139e25.js similarity index 99% rename from assets/js/d6a8d944.4e2a1683.js rename to assets/js/d6a8d944.69139e25.js index 1426c63c..5cb54bf4 100644 --- a/assets/js/d6a8d944.4e2a1683.js +++ b/assets/js/d6a8d944.69139e25.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8860],{3905:(n,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(n,e,a){return e in n?Object.defineProperty(n,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):n[e]=a,n}function l(n,e){var a=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),a.push.apply(a,t)}return a}function i(n){for(var e=1;e=0||(r[a]=n[a]);return r}(n,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,a)&&(r[a]=n[a])}return r}var o=t.createContext({}),s=function(n){var e=t.useContext(o),a=e;return n&&(a="function"==typeof n?n(e):i(i({},e),n)),a},d=function(n){var e=s(n.components);return t.createElement(o.Provider,{value:e},n.children)},m="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},u=t.forwardRef((function(n,e){var a=n.components,r=n.mdxType,l=n.originalType,o=n.parentName,d=p(n,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(o,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:e},d),{},{components:a})):t.createElement(c,i({ref:e},d))}));function c(n,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof n||r){var l=a.length,i=new Array(l);i[0]=u;var p={};for(var o in e)hasOwnProperty.call(e,o)&&(p[o]=e[o]);p.originalType=n,p[m]="string"==typeof n?n:r,i[1]=p;for(var s=2;s{a.r(e),a.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var 
t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-mlflow",id:"kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/docs/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/docs/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/docs/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/docs/kubeflow/how-to-debug"}},o={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. \ubaa8\ub378 \ud559\uc2b5",id:"1-\ubaa8\ub378-\ud559\uc2b5",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(n){let{components:e,...l}=n;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component"},"Advanced Usage Component")," \uc5d0\uc11c \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 API Deployment\uae4c\uc9c0 \uc774\uc5b4\uc9c0\uae30 \uc704\ud574\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"MLFlow\uc5d0\uc11c \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uace0 \uc11c\ube59\uc5d0\uc11c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uc758 \ud56d\ubaa9\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"\ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \ud1b5\ud574\uc11c MLFLow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-\ubaa8\ub378-\ud559\uc2b5"},"1. 
\ubaa8\ub378 \ud559\uc2b5"),(0,r.kt)("p",null,"\uc544\ub798 \uacfc\uc815\uc740 iris \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 SVC \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. MLFLow Infos"),(0,r.kt)("p",null,"mlflow\uc5d0 \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"\uac01 \ubcc0\uc218\uc758 \ub0b4\uc6a9\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. 
Save MLFLow Infos"),(0,r.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \ud559\uc2b5\ud55c \uc815\ubcf4\ub4e4\uacfc \ubaa8\ub378\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4.\n\ud559\uc2b5\ud55c \ubaa8\ub378\uc774 sklearn \ud328\ud0a4\uc9c0\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn")," \uc744 \uc774\uc6a9\ud558\uba74 \uc27d\uac8c \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uc791\uc5c5\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 svc \ud3f4\ub354\uac00 \uc0dd\uae30\uba70 \uc544\ub798\uc640 \uac19\uc740 \ud30c\uc77c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uc758 \ucd9c\ub825\uac12\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"\uac01 \ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"\uc774\uc81c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 mlflow \uc11c\ubc84\uc5d0 \uc62c\ub9ac\ub294 
\uc791\uc5c5\uc744 \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"\uc800\uc7a5\ud558\uace0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," \uac00 \uc0dd\uc131\ub41c \uacbd\ub85c\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 mlflow \uc11c\ubc84\uc640 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ub744\uc6c1\ub2c8\ub2e4.\nmlflow \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud558\uc5ec \uc0dd\uc131\ub41c run\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(4701).Z,width:"2782",height:"2496"}),"\n(\ud574\ub2f9 \ud654\uba74\uc740 mlflow \ubc84\uc804\uc5d0 \ub530\ub77c \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"\uc774\uc81c Kubeflow\uc5d0\uc11c \uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc740 \ud06c\uac8c 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \ud544\uc694\ud55c \ud658\uacbd\uc744 \uc800\uc7a5 \ud6c4 MLFlow \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc5c5\ub85c\ub4dc\ub9cc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(6752).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uacfc \ub370\uc774\ud130\ub97c MLFlow \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \uc804\ub2ec \ud6c4 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(6313).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc\ub97c \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(4119).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \uc774 \uc911 1\ubc88\uc758 \uc811\uadfc \ubc29\ubc95\uc744 \ud1b5\ud574 \ubaa8\ub378\uc744 \uad00\ub9ac\ud558\ub824\uace0 \ud569\ub2c8\ub2e4.\n\uc774\uc720\ub294 MLFlow \ubaa8\ub378\uc744 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucf54\ub4dc\ub294 \ubc14\ub00c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \ub9e4\ubc88 3\ubc88\ucc98\ub7fc \ucef4\ud3ec\ub10c\ud2b8 \uc791\uc131\ub9c8\ub2e4 \uc791\uc131\ud560 \ud544\uc694\ub294 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc7ac\ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc740 1\ubc88\uacfc 2\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c \uac00\ub2a5\ud569\ub2c8\ub2e4.\n\ub2e4\ub9cc 2\ubc88\uc758 \uacbd\uc6b0 \ubaa8\ub378\uc774 \ud559\uc2b5\ub41c \uc774\ubbf8\uc9c0\uc640 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc804\ub2ec\ud574\uc57c \ud558\ubbc0\ub85c \uacb0\uad6d \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \ucd94\uac00 \uc815\ubcf4\ub97c \uc804\ub2ec\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"1\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c 
\uc9c4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub610\ud55c \ubcc0\uacbd\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294\ub370 \ud544\uc694\ud55c \ud658\uacbd\ub4e4\uc744 \uc800\uc7a5\ud574\uc8fc\ub294 \ucf54\ub4dc\uac00 \ucd94\uac00\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 MLFlow\uc5d0 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4.\n\uc774 \ub54c \uc5c5\ub85c\ub4dc\ub418\ub294 MLflow\uc758 endpoint\ub97c \uc6b0\ub9ac\uac00 \uc124\uce58\ud55c ",(0,r.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-mlflow"},"mlflow service")," \ub85c \uc774\uc5b4\uc9c0\uac8c \uc124\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c S3 Endpoint\uc758 \uc8fc\uc18c\ub294 MLflow Server \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c minio\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/services-networking/dns-pod-service/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9"),"\ud569\ub2c8\ub2e4. 
\ud574\ub2f9 service \ub294 kubeflow namespace\uc5d0\uc11c minio-service\ub77c\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\uc73c\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\uc640 \ube44\uc2b7\ud558\uac8c tracking_uri\uc758 \uc8fc\uc18c\ub294 mlflow server\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc5f0\uacb0\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \ub9cc\ub4e4\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"\ubaa8\ub378\uc744 \ud559\uc2b5\ud560 \ub54c \uc4f8 \ub370\uc774\ud130\ub294 sklearn\uc758 iris \uc785\ub2c8\ub2e4.\n\ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778 \ucf54\ub4dc\ub294 
\ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ud558\ub098\uc758 \ud30c\uc774\uc36c \ud30c\uc77c\uc5d0 \uc815\ub9ac\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n 
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n 
outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, 
/tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n 
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n 
"image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n 
labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": 
"{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"\uc2e4\ud589\ud6c4 \uc0dd\uc131\ub41c mlflow_pipeline.yaml \ud30c\uc77c\uc744 \ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ud55c \ud6c4, \uc2e4\ud589\ud558\uc5ec run \uc758 \uacb0\uacfc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(6516).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"mlflow service\ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud574\uc11c MLflow ui\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 localhost:5000\uc73c\ub85c \uc811\uc18d\ud558\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 run\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(3702).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"run \uc744 \ud074\ub9ad\ud574\uc11c \ud655\uc778\ud558\uba74 \ud559\uc2b5\ud55c \ubaa8\ub378 \ud30c\uc77c\uc774 \uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(5543).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},4701:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},6752:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},6313:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},4119:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},6516:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},3702:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},5543:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8860],{3905:(n,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(n,e,a){return e in n?Object.defineProperty(n,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):n[e]=a,n}function l(n,e){var a=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),a.push.apply(a,t)}return a}function i(n){for(var e=1;e=0||(r[a]=n[a]);return r}(n,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,a)&&(r[a]=n[a])}return r}var o=t.createContext({}),s=function(n){var e=t.useContext(o),a=e;return n&&(a="function"==typeof n?n(e):i(i({},e),n)),a},d=function(n){var e=s(n.components);return t.createElement(o.Provider,{value:e},n.children)},m="mdxType",_={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},u=t.forwardRef((function(n,e){var a=n.components,r=n.mdxType,l=n.originalType,o=n.parentName,d=p(n,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(o,".").concat(u)]||m[u]||_[u]||l;return 
a?t.createElement(c,i(i({ref:e},d),{},{components:a})):t.createElement(c,i({ref:e},d))}));function c(n,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof n||r){var l=a.length,i=new Array(l);i[0]=u;var p={};for(var o in e)hasOwnProperty.call(e,o)&&(p[o]=e[o]);p.originalType=n,p[m]="string"==typeof n?n:r,i[1]=p;for(var s=2;s{a.r(e),a.d(e,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-mlflow",id:"kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/docs/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/docs/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/docs/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/docs/kubeflow/how-to-debug"}},o={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. \ubaa8\ub378 \ud559\uc2b5",id:"1-\ubaa8\ub378-\ud559\uc2b5",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. 
Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(n){let{components:e,...l}=n;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-component"},"Advanced Usage Component")," \uc5d0\uc11c \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 API Deployment\uae4c\uc9c0 \uc774\uc5b4\uc9c0\uae30 \uc704\ud574\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"MLFlow\uc5d0\uc11c \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uace0 \uc11c\ube59\uc5d0\uc11c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uc758 \ud56d\ubaa9\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"\ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \ud1b5\ud574\uc11c MLFLow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-\ubaa8\ub378-\ud559\uc2b5"},"1. \ubaa8\ub378 \ud559\uc2b5"),(0,r.kt)("p",null,"\uc544\ub798 \uacfc\uc815\uc740 iris \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 SVC \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. 
MLFLow Infos"),(0,r.kt)("p",null,"mlflow\uc5d0 \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"\uac01 \ubcc0\uc218\uc758 \ub0b4\uc6a9\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. 
Save MLFLow Infos"),(0,r.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \ud559\uc2b5\ud55c \uc815\ubcf4\ub4e4\uacfc \ubaa8\ub378\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4.\n\ud559\uc2b5\ud55c \ubaa8\ub378\uc774 sklearn \ud328\ud0a4\uc9c0\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn")," \uc744 \uc774\uc6a9\ud558\uba74 \uc27d\uac8c \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uc791\uc5c5\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 svc \ud3f4\ub354\uac00 \uc0dd\uae30\uba70 \uc544\ub798\uc640 \uac19\uc740 \ud30c\uc77c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uc758 \ucd9c\ub825\uac12\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"\uac01 \ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"\uc774\uc81c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 mlflow \uc11c\ubc84\uc5d0 \uc62c\ub9ac\ub294 
\uc791\uc5c5\uc744 \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"\uc800\uc7a5\ud558\uace0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," \uac00 \uc0dd\uc131\ub41c \uacbd\ub85c\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 mlflow \uc11c\ubc84\uc640 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ub744\uc6c1\ub2c8\ub2e4.\nmlflow \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud558\uc5ec \uc0dd\uc131\ub41c run\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(4701).Z,width:"2782",height:"2496"}),"\n(\ud574\ub2f9 \ud654\uba74\uc740 mlflow \ubc84\uc804\uc5d0 \ub530\ub77c \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"\uc774\uc81c Kubeflow\uc5d0\uc11c \uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc740 \ud06c\uac8c 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \ud544\uc694\ud55c \ud658\uacbd\uc744 \uc800\uc7a5 \ud6c4 MLFlow \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc5c5\ub85c\ub4dc\ub9cc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(6752).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uacfc \ub370\uc774\ud130\ub97c MLFlow \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \uc804\ub2ec \ud6c4 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(6313).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc\ub97c \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(4119).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \uc774 \uc911 1\ubc88\uc758 \uc811\uadfc \ubc29\ubc95\uc744 \ud1b5\ud574 \ubaa8\ub378\uc744 \uad00\ub9ac\ud558\ub824\uace0 \ud569\ub2c8\ub2e4.\n\uc774\uc720\ub294 MLFlow \ubaa8\ub378\uc744 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucf54\ub4dc\ub294 \ubc14\ub00c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \ub9e4\ubc88 3\ubc88\ucc98\ub7fc \ucef4\ud3ec\ub10c\ud2b8 \uc791\uc131\ub9c8\ub2e4 \uc791\uc131\ud560 \ud544\uc694\ub294 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc7ac\ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc740 1\ubc88\uacfc 2\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c \uac00\ub2a5\ud569\ub2c8\ub2e4.\n\ub2e4\ub9cc 2\ubc88\uc758 \uacbd\uc6b0 \ubaa8\ub378\uc774 \ud559\uc2b5\ub41c \uc774\ubbf8\uc9c0\uc640 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc804\ub2ec\ud574\uc57c \ud558\ubbc0\ub85c \uacb0\uad6d \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \ucd94\uac00 \uc815\ubcf4\ub97c \uc804\ub2ec\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"1\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c 
\uc9c4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub610\ud55c \ubcc0\uacbd\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294\ub370 \ud544\uc694\ud55c \ud658\uacbd\ub4e4\uc744 \uc800\uc7a5\ud574\uc8fc\ub294 \ucf54\ub4dc\uac00 \ucd94\uac00\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 MLFlow\uc5d0 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4.\n\uc774 \ub54c \uc5c5\ub85c\ub4dc\ub418\ub294 MLflow\uc758 endpoint\ub97c \uc6b0\ub9ac\uac00 \uc124\uce58\ud55c ",(0,r.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-mlflow"},"mlflow service")," \ub85c \uc774\uc5b4\uc9c0\uac8c \uc124\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c S3 Endpoint\uc758 \uc8fc\uc18c\ub294 MLflow Server \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c minio\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/services-networking/dns-pod-service/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9"),"\ud569\ub2c8\ub2e4. 
\ud574\ub2f9 service \ub294 kubeflow namespace\uc5d0\uc11c minio-service\ub77c\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\uc73c\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\uc640 \ube44\uc2b7\ud558\uac8c tracking_uri\uc758 \uc8fc\uc18c\ub294 mlflow server\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc5f0\uacb0\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \ub9cc\ub4e4\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"\ubaa8\ub378\uc744 \ud559\uc2b5\ud560 \ub54c \uc4f8 \ub370\uc774\ud130\ub294 sklearn\uc758 iris \uc785\ub2c8\ub2e4.\n\ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778 \ucf54\ub4dc\ub294 
\ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ud558\ub098\uc758 \ud30c\uc774\uc36c \ud30c\uc77c\uc5d0 \uc815\ub9ac\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n 
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n 
outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, 
/tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n 
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n 
"image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n 
labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": 
"{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"\uc2e4\ud589\ud6c4 \uc0dd\uc131\ub41c mlflow_pipeline.yaml \ud30c\uc77c\uc744 \ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ud55c \ud6c4, \uc2e4\ud589\ud558\uc5ec run \uc758 \uacb0\uacfc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(6516).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"mlflow service\ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud574\uc11c MLflow ui\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 localhost:5000\uc73c\ub85c \uc811\uc18d\ud558\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 run\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(3702).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"run \uc744 \ud074\ub9ad\ud574\uc11c \ud655\uc778\ud558\uba74 \ud559\uc2b5\ud55c \ubaa8\ub378 \ud30c\uc77c\uc774 \uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(5543).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},4701:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},6752:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},6313:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},4119:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},6516:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},3702:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},5543:(n,e,a)=>{a.d(e,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file diff --git a/assets/js/d7d2d94a.26ef7de0.js b/assets/js/d7d2d94a.0b4e4bf4.js similarity index 97% rename from assets/js/d7d2d94a.26ef7de0.js rename to assets/js/d7d2d94a.0b4e4bf4.js index 824bf6e9..22aeb162 100644 --- a/assets/js/d7d2d94a.26ef7de0.js +++ b/assets/js/d7d2d94a.0b4e4bf4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8309],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=r.createContext({}),s=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=s(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return 
r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),c=s(n),m=o,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||a;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=m;var p={};for(var i in t)hasOwnProperty.call(t,i)&&(p[i]=t[i]);p.originalType=e,p[c]="string"==typeof e?e:o,l[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>a,metadata:()=>p,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"api-deployment/seldon-pg",id:"api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/docs/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/docs/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/docs/api-deployment/seldon-iris"},next:{title:"4. 
Seldon Fields",permalink:"/docs/api-deployment/seldon-fields"}},i={},s=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"\ub300\uc2dc\ubcf4\ub4dc",id:"\ub300\uc2dc\ubcf4\ub4dc",level:3},{value:"API \uc694\uccad",id:"api-\uc694\uccad",level:3}],d={toc:s},c="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,r.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,o.kt)("p",null,"\uc774\uc81c, ",(0,o.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-iris"},"\uc9c0\ub09c \ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc0dd\uc131\ud588\ub358 SeldonDeployment \ub85c API Request \ub97c \ubc18\ubcf5\uc801\uc73c\ub85c \uc218\ud589\ud574\ubcf4\uace0, \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \ubcc0\ud654\uac00 \uc77c\uc5b4\ub098\ub294\uc9c0 \ud655\uc778\ud574\ubd05\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"\ub300\uc2dc\ubcf4\ub4dc"},"\ub300\uc2dc\ubcf4\ub4dc"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-pg"},"\uc55e\uc11c \uc0dd\uc131\ud55c \ub300\uc2dc\ubcf4\ub4dc"),"\ub97c \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("h3",{id:"api-\uc694\uccad"},"API \uc694\uccad"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-iris#using-cli"},"\uc55e\uc11c \uc0dd\uc131\ud55c Seldon Deployment"),"\uc5d0 \uc694\uccad\uc744 ",(0,o.kt)("strong",{parentName:"p"},"\ubc18\ubcf5\ud574\uc11c")," \ubcf4\ub0c5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,o.kt)("p",null,"\uadf8\ub9ac\uace0 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 Global Request Rate \uc774 ",(0,o.kt)("inlineCode",{parentName:"p"},"0 ops")," \uc5d0\uc11c \uc21c\uac04\uc801\uc73c\ub85c \uc0c1\uc2b9\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"repeat-raise.png",src:n(7434).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"\uc774\ub807\uac8c \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0},7434:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8309],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=r.createContext({}),s=function(e){var t=r.useContext(i),n=t;return 
e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=s(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),c=s(n),m=o,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||a;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=m;var p={};for(var i in t)hasOwnProperty.call(t,i)&&(p[i]=t[i]);p.originalType=e,p[c]="string"==typeof e?e:o,l[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>a,metadata:()=>p,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"api-deployment/seldon-pg",id:"api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/docs/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/docs/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/docs/api-deployment/seldon-iris"},next:{title:"4. 
Seldon Fields",permalink:"/docs/api-deployment/seldon-fields"}},i={},s=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"\ub300\uc2dc\ubcf4\ub4dc",id:"\ub300\uc2dc\ubcf4\ub4dc",level:3},{value:"API \uc694\uccad",id:"api-\uc694\uccad",level:3}],d={toc:s},c="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,r.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,o.kt)("p",null,"\uc774\uc81c, ",(0,o.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-iris"},"\uc9c0\ub09c \ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc0dd\uc131\ud588\ub358 SeldonDeployment \ub85c API Request \ub97c \ubc18\ubcf5\uc801\uc73c\ub85c \uc218\ud589\ud574\ubcf4\uace0, \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \ubcc0\ud654\uac00 \uc77c\uc5b4\ub098\ub294\uc9c0 \ud655\uc778\ud574\ubd05\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"\ub300\uc2dc\ubcf4\ub4dc"},"\ub300\uc2dc\ubcf4\ub4dc"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/setup-components/install-components-pg"},"\uc55e\uc11c \uc0dd\uc131\ud55c \ub300\uc2dc\ubcf4\ub4dc"),"\ub97c \ud3ec\ud2b8 \ud3ec\uc6cc\ub529\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("h3",{id:"api-\uc694\uccad"},"API \uc694\uccad"),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-iris#using-cli"},"\uc55e\uc11c \uc0dd\uc131\ud55c Seldon Deployment"),"\uc5d0 \uc694\uccad\uc744 ",(0,o.kt)("strong",{parentName:"p"},"\ubc18\ubcf5\ud574\uc11c")," \ubcf4\ub0c5\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,o.kt)("p",null,"\uadf8\ub9ac\uace0 \uadf8\ub77c\ud30c\ub098 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 Global Request Rate \uc774 ",(0,o.kt)("inlineCode",{parentName:"p"},"0 ops")," \uc5d0\uc11c \uc21c\uac04\uc801\uc73c\ub85c \uc0c1\uc2b9\ud558\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"repeat-raise.png",src:n(7434).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"\uc774\ub807\uac8c \ud504\ub85c\uba54\ud14c\uc6b0\uc2a4\uc640 \uadf8\ub77c\ud30c\ub098\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."))}u.isMDXComponent=!0},7434:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file diff --git a/assets/js/d7dc9408.46b9e217.js b/assets/js/d7dc9408.ad348719.js similarity index 98% rename from assets/js/d7dc9408.46b9e217.js rename to assets/js/d7dc9408.ad348719.js index 1d6fe3ee..fce2247d 100644 --- a/assets/js/d7dc9408.46b9e217.js +++ b/assets/js/d7dc9408.ad348719.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8225],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=u(n),f=o,d=p["".concat(s,".").concat(f)]||p[f]||m[f]||i;return n?r.createElement(d,a(a({ref:t},c),{},{components:n})):r.createElement(d,a({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"Community",sidebar_position:1},a=void 0,l={unversionedId:"community",id:"community",title:"Community",description:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",source:"@site/community/community.md",sourceDirName:".",slug:"/community",permalink:"/community/community",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/community.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"Community",sidebar_position:1},sidebar:"tutorialSidebar",next:{title:"How to Contribute",permalink:"/community/how-to-contribute"}},s={},u=[{value:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd",level:3},{value:"Question",id:"question",level:3},{value:"Suggestion",id:"suggestion",level:3}],c={toc:u},p="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h3",{id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd"},(0,o.kt)("em",{parentName:"h3"},"\ubaa8\ub450\uc758 MLOps")," \ub9b4\ub9ac\uc988 \uc18c\uc2dd"),(0,o.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub098 \uc218\uc815\uc0ac\ud56d\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/announcements"},"Announcements"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"question"},"Question"),(0,o.kt)("p",null,"\ud504\ub85c\uc81d\ud2b8 \ub0b4\uc6a9\uacfc \uad00\ub828\ub41c \uad81\uae08\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/q-a"},"Q&A"),"\ub97c \ud1b5\ud574 \uc9c8\ubb38\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"suggestion"},"Suggestion"),(0,o.kt)("p",null,"\uc81c\uc548\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/ideas"},"Ideas"),"\ub97c \ud1b5\ud574 \uc81c\uc548\ud574 \uc8fc\uc2dc\uba74 \ub429\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline 
at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8225],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=u(n),f=o,d=p["".concat(s,".").concat(f)]||p[f]||m[f]||i;return n?r.createElement(d,a(a({ref:t},c),{},{components:n})):r.createElement(d,a({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"Community",sidebar_position:1},a=void 0,l={unversionedId:"community",id:"community",title:"Community",description:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",source:"@site/community/community.md",sourceDirName:".",slug:"/community",permalink:"/community/community",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/community.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"Community",sidebar_position:1},sidebar:"tutorialSidebar",next:{title:"How to Contribute",permalink:"/community/how-to-contribute"}},s={},u=[{value:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd",level:3},{value:"Question",id:"question",level:3},{value:"Suggestion",id:"suggestion",level:3}],c={toc:u},p="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h3",{id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd"},(0,o.kt)("em",{parentName:"h3"},"\ubaa8\ub450\uc758 MLOps")," \ub9b4\ub9ac\uc988 \uc18c\uc2dd"),(0,o.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub098 \uc218\uc815\uc0ac\ud56d\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/announcements"},"Announcements"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"question"},"Question"),(0,o.kt)("p",null,"\ud504\ub85c\uc81d\ud2b8 \ub0b4\uc6a9\uacfc \uad00\ub828\ub41c \uad81\uae08\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/q-a"},"Q&A"),"\ub97c 
\ud1b5\ud574 \uc9c8\ubb38\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"suggestion"},"Suggestion"),(0,o.kt)("p",null,"\uc81c\uc548\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/ideas"},"Ideas"),"\ub97c \ud1b5\ud574 \uc81c\uc548\ud574 \uc8fc\uc2dc\uba74 \ub429\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/d9cdcec8.ed342008.js b/assets/js/d9cdcec8.43d53649.js similarity index 99% rename from assets/js/d9cdcec8.ed342008.js rename to assets/js/d9cdcec8.43d53649.js index 0999ee11..cc8be7ef 100644 --- a/assets/js/d9cdcec8.ed342008.js +++ b/assets/js/d9cdcec8.43d53649.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2335],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function p(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),c=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):p(p({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(s.Provider,{value:t},e.children)},l="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),l=c(n),d=o,k=l["".concat(s,".").concat(d)]||l[d]||m[d]||a;return n?r.createElement(k,p(p({ref:t},u),{},{components:n})):r.createElement(k,p({ref:t},u))}));function k(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,p=new Array(a);p[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[l]="string"==typeof e?e:o,p[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>p,default:()=>m,frontMatter:()=>a,metadata:()=>i,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},p=void 0,i={unversionedId:"introduction/why_kubernetes",id:"introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/docs/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/docs/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/why_kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. 
Components of MLOps",permalink:"/docs/introduction/component"},next:{title:"1. Introduction",permalink:"/docs/setup-kubernetes/intro"}},s={},c=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:c},l="wrapper";function m(e){let{components:t,...a}=e;return(0,o.kt)(l,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 MLOps\ub97c \uc774\uc57c\uae30\ud560 \ub54c, \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\ub77c\ub294 \ub2e8\uc5b4\uac00 \ud56d\uc0c1 \ud568\uaed8 \ub4e4\ub9ac\ub294 \uc774\uc720\uac00 \ubb34\uc5c7\uc77c\uae4c\uc694?"),(0,o.kt)("p",null,"\uc131\uacf5\uc801\uc778 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\ucd95\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,o.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c")," \uc5d0\uc11c \uc124\uba85\ud55c \uac83\ucc98\ub7fc \ub2e4\uc591\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \ud544\uc694\ud558\uc9c0\ub9cc, \uac01\uac01\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \uc720\uae30\uc801\uc73c\ub85c \uc6b4\uc601\ub418\uae30 \uc704\ud574\uc11c\ub294 \uc778\ud504\ub77c \ub808\ubca8\uc5d0\uc11c \uc218\ub9ce\uc740 \uc774\uc288\ub97c \ud574\uacb0\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uac04\ub2e8\ud558\uac8c\ub294 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud559\uc2b5 \uc694\uccad\uc744 \ucc28\ub840\ub300\ub85c \uc2e4\ud589\ud558\ub294 \uac83, \ub2e4\ub978 \uc791\uc5c5 \uacf5\uac04\uc5d0\uc11c\ub3c4 \uac19\uc740 \uc2e4\ud589 \ud658\uacbd\uc744 \ubcf4\uc7a5\ud574\uc57c \ud558\ub294 \uac83, \ubc30\ud3ec\ub41c \uc11c\ube44\uc2a4\uc5d0 \uc7a5\uc560\uac00 \uc0dd\uacbc\uc744 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud558\ub294 \uac83 \ub4f1\uc758 \uc774\uc288 \ub4f1\uc744 \uc0dd\uac01\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c \ucee8\ud14c\uc774\ub108(Container)\uc640 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c(Container Orchestration System)\uc758 \ud544\uc694\uc131\uc774 \ub4f1\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud558\uba74 \uc2e4\ud589 \ud658\uacbd\uc758 \uaca9\ub9ac\uc640 \uad00\ub9ac\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud55c\ub2e4\uba74, \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uace0 \ubc30\ud3ec\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub2e4\uc218\uc758 \uac1c\ubc1c\uc790\uac00 \uc18c\uc218\uc758 \ud074\ub7ec\uc2a4\ud130\ub97c \uacf5\uc720\ud558\uba74\uc11c ",(0,o.kt)("em",{parentName:"p"},"'1\ubc88 \ud074\ub7ec\uc2a4\ud130 \uc0ac\uc6a9 \uc911\uc774\uc2e0\uac00\uc694?', 'GPU \uc0ac\uc6a9 \uc911\uc774\ub358 \uc81c \ud504\ub85c\uc138\uc2a4 \ub204\uac00 \uc8fd\uc600\ub098\uc694?', '\ub204\uac00 \ud074\ub7ec\uc2a4\ud130\uc5d0 x \ud328\ud0a4\uc9c0 \uc5c5\ub370\uc774\ud2b8\ud588\ub098\uc694?'")," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108\ub780 \ubb34\uc5c7\uc77c\uae4c\uc694? \ub9c8\uc774\ud06c\ub85c\uc18c\ud504\ud2b8\uc5d0\uc11c\ub294 \ucee8\ud14c\uc774\ub108\ub97c ",(0,o.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/ko-kr/overview/what-is-a-container/"},"\ub2e4\uc74c"),"\uacfc \uac19\uc774 \uc815\uc758\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"\ucee8\ud14c\uc774\ub108\ub780 : \uc560\ud50c\ub9ac\ucf00\uc774\uc158\uc758 \ud45c\uc900\ud654\ub41c \uc774\uc2dd \uac00\ub2a5\ud55c \ud328\ud0a4\uc9d5")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc65c \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ucee8\ud14c\uc774\ub108\uac00 \ud544\uc694\ud560\uae4c\uc694? \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc6b4\uc601\uccb4\uc81c\ub098 Python \uc2e4\ud589 \ud658\uacbd, \ud328\ud0a4\uc9c0 \ubc84\uc804 \ub4f1\uc5d0 \ub530\ub77c \ub2e4\ub974\uac8c \ub3d9\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \ubc29\uc9c0\ud558\uae30 \uc704\ud574\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc5d0 \uc0ac\uc6a9\ub41c \uc18c\uc2a4 \ucf54\ub4dc\uc640 \ud568\uaed8 \uc885\uc18d\uc801\uc778 \uc2e4\ud589 \ud658\uacbd \uc804\uccb4\ub97c ",(0,o.kt)("strong",{parentName:"p"},"\ud558\ub098\ub85c \ubb36\uc5b4\uc11c(\ud328\ud0a4\uc9d5\ud574\uc11c)")," \uacf5\uc720\ud558\uace0 \uc2e4\ud589\ud558\ub294 \ub370 \ud65c\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\uc220\uc785\ub2c8\ub2e4.\n\uc774\ub807\uac8c \ud328\ud0a4\uc9d5\ub41c \ud615\ud0dc\ub97c \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub974\uba70, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud568\uc73c\ub85c\uc368 \uc0ac\uc6a9\uc790\ub4e4\uc740 \uc5b4\ub5a4 \uc2dc\uc2a4\ud15c\uc5d0\uc11c\ub4e0 \uac19\uc740 \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud560 \uc218 \uc788\uac8c \ub429\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc989, \ub2e8\uc21c\ud788 Jupyter Notebook \ud30c\uc77c\uc774\ub098, \ubaa8\ub378\uc758 \uc18c\uc2a4 \ucf54\ub4dc\uc640 requirements.txt \ud30c\uc77c\uc744 \uacf5\uc720\ud558\ub294 \uac83\uc774 \uc544\ub2cc, \ubaa8\ub4e0 \uc2e4\ud589 \ud658\uacbd\uc774 \ub2f4\uae34 \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud55c\ub2e4\uba74 ",(0,o.kt)("em",{parentName:"p"},'"\uc81c \ub178\ud2b8\ubd81\uc5d0\uc11c\ub294 \uc798 \ub418\ub294\ub370\uc694?"')," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ud53c\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,'\ucee8\ud14c\uc774\ub108\ub97c \ucc98\uc74c \uc811\ud558\uc2dc\ub294 \ubd84\ub4e4\uc774 
\ud754\ud788 \ud558\uc2dc\ub294 \uc624\ud574 \uc911 \ud558\ub098\ub294 "',(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108 == \ub3c4\ucee4"),'"\ub77c\uace0 \ubc1b\uc544\ub4e4\uc774\ub294 \uac83\uc785\ub2c8\ub2e4.',(0,o.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\ub294 \ucee8\ud14c\uc774\ub108\uc640 \uac19\uc740 \uc758\ubbf8\ub97c \uc9c0\ub2c8\ub294 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c, \ucee8\ud14c\uc774\ub108\ub97c \ub744\uc6b0\uac70\ub098, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4\uace0 \uacf5\uc720\ud558\ub294 \uac83\uacfc \uac19\uc774 \ucee8\ud14c\uc774\ub108\ub97c \ub354\uc6b1\ub354 \uc27d\uace0 \uc720\uc5f0\ud558\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc815\ub9ac\ud558\uc790\uba74 \ucee8\ud14c\uc774\ub108\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc774\uace0, \ub3c4\ucee4\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc758 \uad6c\ud604\uccb4\ub77c\uace0 \ub9d0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, \ub3c4\ucee4\ub294 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \ub3c4\uad6c \uc911\uc5d0\uc11c \uc26c\uc6b4 \uc0ac\uc6a9\uc131\uacfc \ub192\uc740 \ud6a8\uc728\uc131\uc744 \ubc14\ud0d5\uc73c\ub85c \uac00\uc7a5 \ube60\ub974\uac8c \uc131\uc7a5\ud558\uc5ec \ub300\uc138\uac00 \ub418\uc5c8\uae30\uc5d0 \ucee8\ud14c\uc774\ub108\ud558\uba74 \ub3c4\ucee4\ub77c\ub294 \uc774\ubbf8\uc9c0\uac00 \uc790\ub3d9\uc73c\ub85c \ub5a0\uc624\ub974\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ucee8\ud14c\uc774\ub108\uc640 \ub3c4\ucee4 \uc0dd\ud0dc\uacc4\uac00 \ub300\uc138\uac00 \ub418\uae30\uae4c\uc9c0\ub294 \ub2e4\uc591\ud55c \uc774\uc720\uac00 \uc788\uc9c0\ub9cc, \uae30\uc220\uc801\uc73c\ub85c \uc790\uc138\ud55c \uc774\uc57c\uae30\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ubc94\uc704\ub97c \ub118\uc5b4\uc11c\uae30 \ub54c\ubb38\uc5d0 \ub2e4\ub8e8\uc9c0\ub294 \uc54a\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ud639\uc740 \ub3c4\ucee4\ub97c \ucc98\uc74c \ub4e4\uc5b4\ubcf4\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ub0b4\uc6a9\uc774 \ub2e4\uc18c \uc5b4\ub835\uac8c \ub290\uaef4\uc9c8 \uc218 \uc788\uc73c\ubbc0\ub85c, ",(0,o.kt)("a",{parentName:"p",href:"https://opentutorials.org/course/4781"},"\uc0dd\ud65c\ucf54\ub529"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/2017/01/19/docker-guide-for-beginners-1.html"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc790\ub8cc\ub97c \uba3c\uc800 \uc0b4\ud3b4\ubcf4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694? 
",(0,o.kt)("strong",{parentName:"p"},"\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158"),"\uc774\ub77c\ub294 \ub2e8\uc5b4\uc5d0\uc11c \ucd94\uce21\ud574 \ubcfc \uc218 \uc788\ub4ef\uc774, \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc744 \ub54c \ucee8\ud14c\uc774\ub108\ub4e4\uc774 \uc11c\ub85c \uc870\ud654\ub86d\uac8c \uad6c\ub3d9\ub420 \uc218 \uc788\ub3c4\ub85d \uc9c0\ud718\ud558\ub294 \uc2dc\uc2a4\ud15c\uc5d0 \ube44\uc720\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \uae30\ubc18\uc758 \uc2dc\uc2a4\ud15c\uc5d0\uc11c \uc11c\ube44\uc2a4\ub294 \ucee8\ud14c\uc774\ub108\uc758 \ud615\ud0dc\ub85c \uc0ac\uc6a9\uc790\ub4e4\uc5d0\uac8c \uc81c\uacf5\ub429\ub2c8\ub2e4. \uc774\ub54c \uad00\ub9ac\ud574\uc57c \ud560 \ucee8\ud14c\uc774\ub108\uc758 \uc218\uac00 \uc801\ub2e4\uba74 \uc6b4\uc601 \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774\uc11c\ub3c4 \ucda9\ubd84\ud788 \ubaa8\ub4e0 \uc0c1\ud669\uc5d0 \ub300\uc751\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc, \uc218\ubc31 \uac1c \uc774\uc0c1\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc218 \uc2ed \ub300 \uc774\uc0c1\uc758 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uad6c\ub3d9\ub418\uace0 \uc788\uace0 \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\uc9c0 \uc54a\uace0 \ud56d\uc0c1 \uc815\uc0c1 \ub3d9\uc791\ud574\uc57c \ud55c\ub2e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uc758 \uc815\uc0c1 \ub3d9\uc791 \uc5ec\ubd80\ub97c \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774 \ud30c\uc545\ud558\uace0 \uc774\uc288\uc5d0 \ub300\uc751\ud558\ub294 \uac83\uc740 \ubd88\uac00\ub2a5\uc5d0 \uac00\uae5d\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \ubaa8\ub2c8\ud130\ub9c1(Monitoring)\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud2b9\uc815 \uc11c\ube44\uc2a4\uac00 \uc7a5\uc560\ub97c \uc77c\uc73c\ucf30\ub2e4\uba74 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108\uc758 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uac00\uba70 \ubb38\uc81c\ub97c \ud30c\uc545\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub610\ud55c, \ud2b9\uc815 \ud074\ub7ec\uc2a4\ud130\ub098 \ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uc5d0 \uc791\uc5c5\uc774 \ubab0\ub9ac\uc9c0 \uc54a\ub3c4\ub85d \uc2a4\ucf00\uc904\ub9c1(Scheduling)\ud558\uace0 \ub85c\ub4dc \ubc38\ub7f0\uc2f1(Load Balancing)\ud558\uba70, \uc2a4\ucf00\uc77c\ub9c1(Scaling)\ud558\ub294 \ub4f1\uc758 \uc218\ub9ce\uc740 \uc791\uc5c5\uc744 \ub2f4\ub2f9\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uc774\ub807\uac8c \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ub9ac\ud558\uace0 \uc6b4\uc601\ud558\ub294 \uacfc\uc815\uc744 \uc870\uae08\uc774\ub098\ub9c8 \uc27d\uac8c, \uc790\ub3d9\uc73c\ub85c \ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uac00 \ubc14\ub85c \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc785\ub2c8\ub2e4. 
"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c\ub294 \uc5b4\ub5bb\uac8c \uc4f0\uc77c \uc218 \uc788\uc744\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c GPU\uac00 \uc788\uc5b4\uc57c \ud558\ub294 \ub525\ub7ec\ub2dd \ud559\uc2b5 \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc0ac\uc6a9 \uac00\ub2a5\ud55c GPU\uac00 \uc788\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ub9ce\uc740 \uba54\ubaa8\ub9ac\ub97c \ud544\uc694\ub85c \ud558\ub294 \ub370\uc774\ud130 \uc804\ucc98\ub9ac \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uba54\ubaa8\ub9ac\uc758 \uc5ec\uc720\uac00 \ub9ce\uc740 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ud559\uc2b5 \uc911\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubb38\uc81c\uac00 \uc0dd\uae30\uba74 \uc790\ub3d9\uc73c\ub85c \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \ub2e4\ub978 \ud074\ub7ec\uc2a4\ud130\ub85c \uc774\ub3d9\uc2dc\ud0a4\uace0 \ub2e4\uc2dc \ud559\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 \ub4f1\uc758 \uc791\uc5c5\uc744 \uc0ac\ub78c\uc774 \uc77c\uc77c\uc774 \uc218\ud589\ud558\uc9c0 \uc54a\uace0, \uc790\ub3d9\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \uc2dc\uc2a4\ud15c\uc744 \uac1c\ubc1c\ud55c \ub4a4 \ub9e1\uae30\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc9d1\ud544\uc744 \ud558\ub294 2022\ub144\uc744 \uae30\uc900\uc73c\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc758 \uc0ac\uc2e4\uc0c1\uc758 \ud45c\uc900(De facto standard)\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"CNCF\uc5d0\uc11c 2018\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"Survey")," \uc5d0 \ub530\ub974\uba74 \ub2e4\uc74c \uadf8\ub9bc\uacfc \uac19\uc774 \uc774\ubbf8 \ub450\uac01\uc744 \ub098\ud0c0\ub0b4\uace0 \uc788\uc5c8\uc73c\uba70, 2019\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"Survey"),"\uc5d0 \ub530\ub974\uba74 \uadf8\uc911 78%\uac00 \uc0c1\uc6a9 \uc218\uc900(Production Level)\uc5d0\uc11c \uc0ac\uc6a9\ud558\uace0 \uc788\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"k8s-graph",src:n(8870).Z,width:"2048",height:"1317"})),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0dd\ud0dc\uacc4\uac00 \uc774\ucc98\ub7fc \ucee4\uc9c0\uac8c \ub41c \uc774\uc720\uc5d0\ub294 \uc5ec\ub7ec \uac00\uc9c0 \uc774\uc720\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \ub3c4\ucee4\uc640 \ub9c8\ucc2c\uac00\uc9c0\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc5ed\uc2dc \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc11c\ube44\uc2a4\uc5d0\uc11c\ub9cc \uc0ac\uc6a9\ud558\ub294 \uae30\uc220\uc774 \uc544\ub2c8\uae30\uc5d0, \uc790\uc138\ud788 \ub2e4\ub8e8\uae30\uc5d0\ub294 \uc0c1\ub2f9\ud788 \ub9ce\uc740 \uc591\uc758 \uae30\uc220\uc801\uc778 \ub0b4\uc6a9\uc744 \ub2e4\ub8e8\uc5b4\uc57c \ud558\ubbc0\ub85c \uc774\ubc88 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc0dd\ub7b5\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc55e\uc73c\ub85c \ub2e4\ub8f0 \ub0b4\uc6a9\uc740 \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud55c \ub0b4\uc6a9\uc744 \uc5b4\ub290 \uc815\ub3c4 \uc54c\uace0 \uacc4\uc2e0 \ubd84\ub4e4\uc744 \ub300\uc0c1\uc73c\ub85c \uc791\uc131\ud558\uc600\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud574 \uc775\uc219\ud558\uc9c0 \uc54a\uc73c\uc2e0 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/overview/what-is-kubernetes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uacf5\uc2dd \ubb38\uc11c"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/k8s/"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc27d\uace0 \uc790\uc138\ud55c \uc790\ub8cc\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\uc8fc\uc2dc\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0},8870:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2335],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function p(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),c=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):p(p({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(s.Provider,{value:t},e.children)},l="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),l=c(n),d=o,k=l["".concat(s,".").concat(d)]||l[d]||m[d]||a;return n?r.createElement(k,p(p({ref:t},u),{},{components:n})):r.createElement(k,p({ref:t},u))}));function k(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,p=new Array(a);p[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[l]="string"==typeof e?e:o,p[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>p,default:()=>m,frontMatter:()=>a,metadata:()=>i,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"4. 
Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},p=void 0,i={unversionedId:"introduction/why_kubernetes",id:"introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/docs/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/docs/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/why_kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/docs/introduction/component"},next:{title:"1. Introduction",permalink:"/docs/setup-kubernetes/intro"}},s={},c=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:c},l="wrapper";function m(e){let{components:t,...a}=e;return(0,o.kt)(l,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 MLOps\ub97c \uc774\uc57c\uae30\ud560 \ub54c, \ucfe0\ubc84\ub124\ud2f0\uc2a4(Kubernetes)\ub77c\ub294 \ub2e8\uc5b4\uac00 \ud56d\uc0c1 \ud568\uaed8 \ub4e4\ub9ac\ub294 \uc774\uc720\uac00 \ubb34\uc5c7\uc77c\uae4c\uc694?"),(0,o.kt)("p",null,"\uc131\uacf5\uc801\uc778 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\ucd95\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,o.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c")," \uc5d0\uc11c \uc124\uba85\ud55c \uac83\ucc98\ub7fc \ub2e4\uc591\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \ud544\uc694\ud558\uc9c0\ub9cc, \uac01\uac01\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uc774 \uc720\uae30\uc801\uc73c\ub85c \uc6b4\uc601\ub418\uae30 \uc704\ud574\uc11c\ub294 \uc778\ud504\ub77c \ub808\ubca8\uc5d0\uc11c \uc218\ub9ce\uc740 \uc774\uc288\ub97c \ud574\uacb0\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uac04\ub2e8\ud558\uac8c\ub294 \uc218\ub9ce\uc740 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \ud559\uc2b5 \uc694\uccad\uc744 \ucc28\ub840\ub300\ub85c \uc2e4\ud589\ud558\ub294 \uac83, \ub2e4\ub978 \uc791\uc5c5 \uacf5\uac04\uc5d0\uc11c\ub3c4 \uac19\uc740 \uc2e4\ud589 \ud658\uacbd\uc744 \ubcf4\uc7a5\ud574\uc57c \ud558\ub294 \uac83, \ubc30\ud3ec\ub41c \uc11c\ube44\uc2a4\uc5d0 \uc7a5\uc560\uac00 \uc0dd\uacbc\uc744 \ub54c \ube60\ub974\uac8c \ub300\uc751\ud574\uc57c \ud558\ub294 \uac83 \ub4f1\uc758 \uc774\uc288 \ub4f1\uc744 \uc0dd\uac01\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc5ec\uae30\uc11c \ucee8\ud14c\uc774\ub108(Container)\uc640 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c(Container Orchestration System)\uc758 \ud544\uc694\uc131\uc774 \ub4f1\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\uc640 \uac19\uc740 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud558\uba74 
\uc2e4\ud589 \ud658\uacbd\uc758 \uaca9\ub9ac\uc640 \uad00\ub9ac\ub97c \ud6a8\uc728\uc801\uc73c\ub85c \uc218\ud589\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc744 \ub3c4\uc785\ud55c\ub2e4\uba74, \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \uac1c\ubc1c\ud558\uace0 \ubc30\ud3ec\ud558\ub294 \uacfc\uc815\uc5d0\uc11c \ub2e4\uc218\uc758 \uac1c\ubc1c\uc790\uac00 \uc18c\uc218\uc758 \ud074\ub7ec\uc2a4\ud130\ub97c \uacf5\uc720\ud558\uba74\uc11c ",(0,o.kt)("em",{parentName:"p"},"'1\ubc88 \ud074\ub7ec\uc2a4\ud130 \uc0ac\uc6a9 \uc911\uc774\uc2e0\uac00\uc694?', 'GPU \uc0ac\uc6a9 \uc911\uc774\ub358 \uc81c \ud504\ub85c\uc138\uc2a4 \ub204\uac00 \uc8fd\uc600\ub098\uc694?', '\ub204\uac00 \ud074\ub7ec\uc2a4\ud130\uc5d0 x \ud328\ud0a4\uc9c0 \uc5c5\ub370\uc774\ud2b8\ud588\ub098\uc694?'")," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 \ubc29\uc9c0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108\ub780 \ubb34\uc5c7\uc77c\uae4c\uc694? \ub9c8\uc774\ud06c\ub85c\uc18c\ud504\ud2b8\uc5d0\uc11c\ub294 \ucee8\ud14c\uc774\ub108\ub97c ",(0,o.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/ko-kr/overview/what-is-a-container/"},"\ub2e4\uc74c"),"\uacfc \uac19\uc774 \uc815\uc758\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},"\ucee8\ud14c\uc774\ub108\ub780 : \uc560\ud50c\ub9ac\ucf00\uc774\uc158\uc758 \ud45c\uc900\ud654\ub41c \uc774\uc2dd \uac00\ub2a5\ud55c \ud328\ud0a4\uc9d5")),(0,o.kt)("p",null,"\uadf8\ub7f0\ub370 \uc65c \uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c \ucee8\ud14c\uc774\ub108\uac00 \ud544\uc694\ud560\uae4c\uc694? \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\ub4e4\uc740 \uc6b4\uc601\uccb4\uc81c\ub098 Python \uc2e4\ud589 \ud658\uacbd, \ud328\ud0a4\uc9c0 \ubc84\uc804 \ub4f1\uc5d0 \ub530\ub77c \ub2e4\ub974\uac8c \ub3d9\uc791\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub97c \ubc29\uc9c0\ud558\uae30 \uc704\ud574\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc5d0 \uc0ac\uc6a9\ub41c \uc18c\uc2a4 \ucf54\ub4dc\uc640 \ud568\uaed8 \uc885\uc18d\uc801\uc778 \uc2e4\ud589 \ud658\uacbd \uc804\uccb4\ub97c ",(0,o.kt)("strong",{parentName:"p"},"\ud558\ub098\ub85c \ubb36\uc5b4\uc11c(\ud328\ud0a4\uc9d5\ud574\uc11c)")," \uacf5\uc720\ud558\uace0 \uc2e4\ud589\ud558\ub294 \ub370 \ud65c\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\uc220\uc774 \ucee8\ud14c\uc774\ub108\ub77c\uc774\uc81c\uc774\uc158(Containerization) \uae30\uc220\uc785\ub2c8\ub2e4.\n\uc774\ub807\uac8c \ud328\ud0a4\uc9d5\ub41c \ud615\ud0dc\ub97c \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub77c\uace0 \ubd80\ub974\uba70, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud568\uc73c\ub85c\uc368 \uc0ac\uc6a9\uc790\ub4e4\uc740 \uc5b4\ub5a4 \uc2dc\uc2a4\ud15c\uc5d0\uc11c\ub4e0 \uac19\uc740 \uc2e4\ud589 \uacb0\uacfc\ub97c \ubcf4\uc7a5\ud560 \uc218 \uc788\uac8c \ub429\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc989, \ub2e8\uc21c\ud788 Jupyter Notebook \ud30c\uc77c\uc774\ub098, \ubaa8\ub378\uc758 \uc18c\uc2a4 \ucf54\ub4dc\uc640 requirements.txt \ud30c\uc77c\uc744 \uacf5\uc720\ud558\ub294 \uac83\uc774 \uc544\ub2cc, \ubaa8\ub4e0 \uc2e4\ud589 \ud658\uacbd\uc774 \ub2f4\uae34 \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \uacf5\uc720\ud55c\ub2e4\uba74 ",(0,o.kt)("em",{parentName:"p"},'"\uc81c \ub178\ud2b8\ubd81\uc5d0\uc11c\ub294 \uc798 \ub418\ub294\ub370\uc694?"')," \uc640 \uac19\uc740 \uc0c1\ud669\uc744 
\ud53c\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,'\ucee8\ud14c\uc774\ub108\ub97c \ucc98\uc74c \uc811\ud558\uc2dc\ub294 \ubd84\ub4e4\uc774 \ud754\ud788 \ud558\uc2dc\ub294 \uc624\ud574 \uc911 \ud558\ub098\ub294 "',(0,o.kt)("strong",{parentName:"p"},"\ucee8\ud14c\uc774\ub108 == \ub3c4\ucee4"),'"\ub77c\uace0 \ubc1b\uc544\ub4e4\uc774\ub294 \uac83\uc785\ub2c8\ub2e4.',(0,o.kt)("br",{parentName:"p"}),"\n","\ub3c4\ucee4\ub294 \ucee8\ud14c\uc774\ub108\uc640 \uac19\uc740 \uc758\ubbf8\ub97c \uc9c0\ub2c8\ub294 \uac1c\ub150\uc774 \uc544\ub2c8\ub77c, \ucee8\ud14c\uc774\ub108\ub97c \ub744\uc6b0\uac70\ub098, \ucee8\ud14c\uc774\ub108 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4\uace0 \uacf5\uc720\ud558\ub294 \uac83\uacfc \uac19\uc774 \ucee8\ud14c\uc774\ub108\ub97c \ub354\uc6b1\ub354 \uc27d\uace0 \uc720\uc5f0\ud558\uac8c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \ub3c4\uad6c\uc785\ub2c8\ub2e4. \uc815\ub9ac\ud558\uc790\uba74 \ucee8\ud14c\uc774\ub108\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc774\uace0, \ub3c4\ucee4\ub294 \uac00\uc0c1\ud654 \uae30\uc220\uc758 \uad6c\ud604\uccb4\ub77c\uace0 \ub9d0\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, \ub3c4\ucee4\ub294 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108 \uac00\uc0c1\ud654 \ub3c4\uad6c \uc911\uc5d0\uc11c \uc26c\uc6b4 \uc0ac\uc6a9\uc131\uacfc \ub192\uc740 \ud6a8\uc728\uc131\uc744 \ubc14\ud0d5\uc73c\ub85c \uac00\uc7a5 \ube60\ub974\uac8c \uc131\uc7a5\ud558\uc5ec \ub300\uc138\uac00 \ub418\uc5c8\uae30\uc5d0 \ucee8\ud14c\uc774\ub108\ud558\uba74 \ub3c4\ucee4\ub77c\ub294 \uc774\ubbf8\uc9c0\uac00 \uc790\ub3d9\uc73c\ub85c \ub5a0\uc624\ub974\uac8c \ub418\uc5c8\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \ucee8\ud14c\uc774\ub108\uc640 \ub3c4\ucee4 \uc0dd\ud0dc\uacc4\uac00 \ub300\uc138\uac00 \ub418\uae30\uae4c\uc9c0\ub294 \ub2e4\uc591\ud55c \uc774\uc720\uac00 \uc788\uc9c0\ub9cc, \uae30\uc220\uc801\uc73c\ub85c \uc790\uc138\ud55c \uc774\uc57c\uae30\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ubc94\uc704\ub97c \ub118\uc5b4\uc11c\uae30 \ub54c\ubb38\uc5d0 \ub2e4\ub8e8\uc9c0\ub294 \uc54a\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \ud639\uc740 \ub3c4\ucee4\ub97c \ucc98\uc74c \ub4e4\uc5b4\ubcf4\uc2dc\ub294 \ubd84\ub4e4\uc5d0\uac8c\ub294 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc758 \ub0b4\uc6a9\uc774 \ub2e4\uc18c \uc5b4\ub835\uac8c \ub290\uaef4\uc9c8 \uc218 \uc788\uc73c\ubbc0\ub85c, ",(0,o.kt)("a",{parentName:"p",href:"https://opentutorials.org/course/4781"},"\uc0dd\ud65c\ucf54\ub529"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/2017/01/19/docker-guide-for-beginners-1.html"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc790\ub8cc\ub97c \uba3c\uc800 \uc0b4\ud3b4\ubcf4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,o.kt)("p",null,"\uadf8\ub807\ub2e4\uba74 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc740 \ubb34\uc5c7\uc77c\uae4c\uc694? 
",(0,o.kt)("strong",{parentName:"p"},"\uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158"),"\uc774\ub77c\ub294 \ub2e8\uc5b4\uc5d0\uc11c \ucd94\uce21\ud574 \ubcfc \uc218 \uc788\ub4ef\uc774, \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uac00 \uc788\uc744 \ub54c \ucee8\ud14c\uc774\ub108\ub4e4\uc774 \uc11c\ub85c \uc870\ud654\ub86d\uac8c \uad6c\ub3d9\ub420 \uc218 \uc788\ub3c4\ub85d \uc9c0\ud718\ud558\ub294 \uc2dc\uc2a4\ud15c\uc5d0 \ube44\uc720\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucee8\ud14c\uc774\ub108 \uae30\ubc18\uc758 \uc2dc\uc2a4\ud15c\uc5d0\uc11c \uc11c\ube44\uc2a4\ub294 \ucee8\ud14c\uc774\ub108\uc758 \ud615\ud0dc\ub85c \uc0ac\uc6a9\uc790\ub4e4\uc5d0\uac8c \uc81c\uacf5\ub429\ub2c8\ub2e4. \uc774\ub54c \uad00\ub9ac\ud574\uc57c \ud560 \ucee8\ud14c\uc774\ub108\uc758 \uc218\uac00 \uc801\ub2e4\uba74 \uc6b4\uc601 \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774\uc11c\ub3c4 \ucda9\ubd84\ud788 \ubaa8\ub4e0 \uc0c1\ud669\uc5d0 \ub300\uc751\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc, \uc218\ubc31 \uac1c \uc774\uc0c1\uc758 \ucee8\ud14c\uc774\ub108\uac00 \uc218 \uc2ed \ub300 \uc774\uc0c1\uc758 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uad6c\ub3d9\ub418\uace0 \uc788\uace0 \uc7a5\uc560\ub97c \uc77c\uc73c\ud0a4\uc9c0 \uc54a\uace0 \ud56d\uc0c1 \uc815\uc0c1 \ub3d9\uc791\ud574\uc57c \ud55c\ub2e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uc758 \uc815\uc0c1 \ub3d9\uc791 \uc5ec\ubd80\ub97c \ub2f4\ub2f9\uc790 \ud55c \uba85\uc774 \ud30c\uc545\ud558\uace0 \uc774\uc288\uc5d0 \ub300\uc751\ud558\ub294 \uac83\uc740 \ubd88\uac00\ub2a5\uc5d0 \uac00\uae5d\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc608\ub97c \ub4e4\uba74, \ubaa8\ub4e0 \uc11c\ube44\uc2a4\uac00 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \uacc4\uc18d\ud574\uc11c \ubaa8\ub2c8\ud130\ub9c1(Monitoring)\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \ud2b9\uc815 \uc11c\ube44\uc2a4\uac00 \uc7a5\uc560\ub97c \uc77c\uc73c\ucf30\ub2e4\uba74 \uc5ec\ub7ec \ucee8\ud14c\uc774\ub108\uc758 \ub85c\uadf8\ub97c \ud655\uc778\ud574\uac00\uba70 \ubb38\uc81c\ub97c \ud30c\uc545\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\ub610\ud55c, \ud2b9\uc815 \ud074\ub7ec\uc2a4\ud130\ub098 \ud2b9\uc815 \ucee8\ud14c\uc774\ub108\uc5d0 \uc791\uc5c5\uc774 \ubab0\ub9ac\uc9c0 \uc54a\ub3c4\ub85d \uc2a4\ucf00\uc904\ub9c1(Scheduling)\ud558\uace0 \ub85c\ub4dc \ubc38\ub7f0\uc2f1(Load Balancing)\ud558\uba70, \uc2a4\ucf00\uc77c\ub9c1(Scaling)\ud558\ub294 \ub4f1\uc758 \uc218\ub9ce\uc740 \uc791\uc5c5\uc744 \ub2f4\ub2f9\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uc774\ub807\uac8c \uc218\ub9ce\uc740 \ucee8\ud14c\uc774\ub108\uc758 \uc0c1\ud0dc\ub97c \uc9c0\uc18d\ud574\uc11c \uad00\ub9ac\ud558\uace0 \uc6b4\uc601\ud558\ub294 \uacfc\uc815\uc744 \uc870\uae08\uc774\ub098\ub9c8 \uc27d\uac8c, \uc790\ub3d9\uc73c\ub85c \ud560 \uc218 \uc788\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud574\uc8fc\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\uac00 \ubc14\ub85c \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc785\ub2c8\ub2e4. 
"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd\uc5d0\uc11c\ub294 \uc5b4\ub5bb\uac8c \uc4f0\uc77c \uc218 \uc788\uc744\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uc608\ub97c \ub4e4\uc5b4\uc11c GPU\uac00 \uc788\uc5b4\uc57c \ud558\ub294 \ub525\ub7ec\ub2dd \ud559\uc2b5 \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uc0ac\uc6a9 \uac00\ub2a5\ud55c GPU\uac00 \uc788\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ub9ce\uc740 \uba54\ubaa8\ub9ac\ub97c \ud544\uc694\ub85c \ud558\ub294 \ub370\uc774\ud130 \uc804\ucc98\ub9ac \ucf54\ub4dc\uac00 \ud328\ud0a4\uc9d5\ub41c \ucee8\ud14c\uc774\ub108\ub294 \uba54\ubaa8\ub9ac\uc758 \uc5ec\uc720\uac00 \ub9ce\uc740 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c \uc218\ud589\ud558\uace0, \ud559\uc2b5 \uc911\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc5d0 \ubb38\uc81c\uac00 \uc0dd\uae30\uba74 \uc790\ub3d9\uc73c\ub85c \uac19\uc740 \ucee8\ud14c\uc774\ub108\ub97c \ub2e4\ub978 \ud074\ub7ec\uc2a4\ud130\ub85c \uc774\ub3d9\uc2dc\ud0a4\uace0 \ub2e4\uc2dc \ud559\uc2b5\uc744 \uc9c4\ud589\ud558\ub294 \ub4f1\uc758 \uc791\uc5c5\uc744 \uc0ac\ub78c\uc774 \uc77c\uc77c\uc774 \uc218\ud589\ud558\uc9c0 \uc54a\uace0, \uc790\ub3d9\uc73c\ub85c \uad00\ub9ac\ud558\ub294 \uc2dc\uc2a4\ud15c\uc744 \uac1c\ubc1c\ud55c \ub4a4 \ub9e1\uae30\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc9d1\ud544\uc744 \ud558\ub294 2022\ub144\uc744 \uae30\uc900\uc73c\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4\ub294 \ucee8\ud14c\uc774\ub108 \uc624\ucf00\uc2a4\ud2b8\ub808\uc774\uc158 \uc2dc\uc2a4\ud15c\uc758 \uc0ac\uc2e4\uc0c1\uc758 \ud45c\uc900(De facto standard)\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"CNCF\uc5d0\uc11c 2018\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"Survey")," \uc5d0 \ub530\ub974\uba74 \ub2e4\uc74c \uadf8\ub9bc\uacfc \uac19\uc774 \uc774\ubbf8 \ub450\uac01\uc744 \ub098\ud0c0\ub0b4\uace0 \uc788\uc5c8\uc73c\uba70, 2019\ub144 \ubc1c\ud45c\ud55c ",(0,o.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"Survey"),"\uc5d0 \ub530\ub974\uba74 \uadf8\uc911 78%\uac00 \uc0c1\uc6a9 \uc218\uc900(Production Level)\uc5d0\uc11c \uc0ac\uc6a9\ud558\uace0 \uc788\ub2e4\ub294 \uac83\uc744 \uc54c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"k8s-graph",src:n(8870).Z,width:"2048",height:"1317"})),(0,o.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc0dd\ud0dc\uacc4\uac00 \uc774\ucc98\ub7fc \ucee4\uc9c0\uac8c \ub41c \uc774\uc720\uc5d0\ub294 \uc5ec\ub7ec \uac00\uc9c0 \uc774\uc720\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\ud558\uc9c0\ub9cc \ub3c4\ucee4\uc640 \ub9c8\ucc2c\uac00\uc9c0\ub85c \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc5ed\uc2dc \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc11c\ube44\uc2a4\uc5d0\uc11c\ub9cc \uc0ac\uc6a9\ud558\ub294 \uae30\uc220\uc774 \uc544\ub2c8\uae30\uc5d0, \uc790\uc138\ud788 \ub2e4\ub8e8\uae30\uc5d0\ub294 \uc0c1\ub2f9\ud788 \ub9ce\uc740 \uc591\uc758 \uae30\uc220\uc801\uc778 \ub0b4\uc6a9\uc744 \ub2e4\ub8e8\uc5b4\uc57c \ud558\ubbc0\ub85c \uc774\ubc88 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc790\uc138\ud55c \ub0b4\uc6a9\uc740 \uc0dd\ub7b5\ud560 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub2e4\ub9cc, ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc55e\uc73c\ub85c \ub2e4\ub8f0 \ub0b4\uc6a9\uc740 \ub3c4\ucee4\uc640 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud55c \ub0b4\uc6a9\uc744 \uc5b4\ub290 \uc815\ub3c4 \uc54c\uace0 \uacc4\uc2e0 \ubd84\ub4e4\uc744 \ub300\uc0c1\uc73c\ub85c \uc791\uc131\ud558\uc600\uc2b5\ub2c8\ub2e4. \ub530\ub77c\uc11c \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc5d0 \ub300\ud574 \uc775\uc219\ud558\uc9c0 \uc54a\uc73c\uc2e0 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/overview/what-is-kubernetes/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uacf5\uc2dd \ubb38\uc11c"),", ",(0,o.kt)("a",{parentName:"p",href:"https://subicura.com/k8s/"},"subicura \ub2d8\uc758 \uac1c\uc778 \ube14\ub85c\uadf8 \uae00")," \ub4f1\uc758 \uc27d\uace0 \uc790\uc138\ud55c \uc790\ub8cc\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\uc8fc\uc2dc\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."))}m.isMDXComponent=!0},8870:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file diff --git a/assets/js/d9ed3de0.369633af.js b/assets/js/d9ed3de0.a4f05ca4.js similarity index 98% rename from assets/js/d9ed3de0.369633af.js rename to assets/js/d9ed3de0.a4f05ca4.js index db173cef..3156cdae 100644 --- a/assets/js/d9ed3de0.369633af.js +++ b/assets/js/d9ed3de0.a4f05ca4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7345],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>y});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=r.createContext({}),m=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=m(e.components);return r.createElement(i.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),u=m(n),d=o,y=u["".concat(i,".").concat(d)]||u[d]||c[d]||a;return n?r.createElement(y,l(l({ref:t},s),{},{components:n})):r.createElement(y,l({ref:t},s))}));function y(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=d;var p={};for(var i in 
t)hasOwnProperty.call(t,i)&&(p[i]=t[i]);p.originalType=e,p[u]="string"==typeof e?e:o,l[1]=p;for(var m=2;m{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>m});var r=n(7462),o=(n(7294),n(3905));const a={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,p={unversionedId:"api-deployment/what-is-api-deployment",id:"api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/docs/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/docs/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/what-is-api-deployment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/docs/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/docs/api-deployment/seldon-iris"}},i={},m=[{value:"API Deployment\ub780?",id:"api-deployment\ub780",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],s={toc:m},u="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"api-deployment\ub780"},"API Deployment\ub780?"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ud559\uc2b5\ud55c \ub4a4\uc5d0\ub294 \uc5b4\ub5bb\uac8c \uc0ac\uc6a9\ud574\uc57c \ud560\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uc744 \ud559\uc2b5\ud560 \ub54c\ub294 \ub354 \ub192\uc740 \uc131\ub2a5\uc758 \ubaa8\ub378\uc774 \ub098\uc624\uae30\ub97c \uae30\ub300\ud558\uc9c0\ub9cc, \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud558\uc5ec \ucd94\ub860\uc744 \ud560 \ub54c\ub294 \ube60\ub974\uace0 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc544\ubcf4\uace0 \uc2f6\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud558\uace0\uc790 \ud560 \ub54c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc774\ub098 \ud30c\uc774\uc36c \uc2a4\ud06c\ub9bd\ud2b8\ub97c \ud1b5\ud574 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ub85c\ub4dc\ud55c \ub4a4 \ucd94\ub860\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc \uc774\ub7f0 \ubc29\ubc95\uc740 \ubaa8\ub378\uc774 \ud074\uc218\ub85d \ubaa8\ub378\uc744 \ubd88\ub7ec\uc624\ub294 \ub370 \ub9ce\uc740 \uc2dc\uac04\uc744 \uc18c\uc694\ud558\uac8c \ub418\uc5b4\uc11c \ube44\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4. 
\ub610\ud55c \uc774\ub807\uac8c \uc774\uc6a9\ud558\uba74 \ub9ce\uc740 \uc0ac\ub78c\uc774 \ubaa8\ub378\uc744 \uc774\uc6a9\ud560 \uc218 \uc5c6\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\ub294 \ud658\uacbd\uc5d0\uc11c\ubc16\uc5d0 \uc0ac\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub798\uc11c \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc774 \uc0ac\uc6a9\ub420 \ub54c\ub294 API\ub97c \uc774\uc6a9\ud574\uc11c \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \ubaa8\ub378\uc740 API \uc11c\ubc84\uac00 \uad6c\ub3d9\ub418\ub294 \ud658\uacbd\uc5d0\uc11c \ud55c \ubc88\ub9cc \ub85c\ub4dc\uac00 \ub418\uba70, DNS\ub97c \ud65c\uc6a9\ud558\uc5ec \uc678\ubd80\uc5d0\uc11c\ub3c4 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uace0 \ub2e4\ub978 \uc11c\ube44\uc2a4\uc640 \uc5f0\ub3d9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ud558\uc9c0\ub9cc \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc5d0\ub294 \uc0dd\uac01\ubcf4\ub2e4 \ub9ce\uc740 \ubd80\uc218\uc801\uc778 \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc744 \ub354 \uc27d\uac8c \ud558\uae30 \uc704\ud574\uc11c Tensorflow\uc640 \uac19\uc740 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c \uc9c4\uc601\uc5d0\uc11c\ub294 \ucd94\ub860 \uc5d4\uc9c4(Inference engine)\uc744 \uac1c\ubc1c\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc774\uc6a9\ud558\uba74 \ud574\ub2f9 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c\ub85c \uac1c\ubc1c\ub418\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ubd88\ub7ec\uc640 \ucd94\ub860\uc774 \uac00\ub2a5\ud55c API(REST \ub610\ub294 gRPC)\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\uc744 \ud65c\uc6a9\ud558\uc5ec \uad6c\ucd95\ud55c API \uc11c\ubc84\ub85c \ucd94\ub860\ud558\uace0\uc790 \ud558\ub294 \ub370\uc774\ud130\ub97c \ub2f4\uc544 \uc694\uccad\uc744 \ubcf4\ub0b4\uba74, \ucd94\ub860 \uc5d4\uc9c4\uc774 \ucd94\ub860 \uacb0\uacfc\ub97c \uc751\ub2f5\uc5d0 \ub2f4\uc544 \uc804\uc1a1\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub300\ud45c\uc801\uc73c\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow : Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch : Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"Onnx : Onnx Runtime"))),(0,o.kt)("p",null,"\uc624\ud504\uc18c\uc2a4\uc5d0\uc11c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uc9c0\ub294 \uc54a\uc9c0\ub9cc, \ub9ce\uc774 \uc4f0\uc774\ub294 sklearn, xgboost \ud504\ub808\uc784\uc6cc\ud06c\ub97c \uc704\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub3c4 \uac1c\ubc1c\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c API\uc758 \ud615\ud0dc\ub85c \ubc1b\uc544\ubcfc \uc218 \uc788\ub3c4\ub85d \ubc30\ud3ec\ud558\ub294 \uac83\uc744 ",(0,o.kt)("strong",{parentName:"p"},"API Deployment"),"\ub77c\uace0 
\ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub2e4\uc591\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\ub2e4\ub294 \uc0ac\uc2e4\uc744 \uc18c\uac1c\ud574 \ub4dc\ub838\uc2b5\ub2c8\ub2e4.\n\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0\uc11c \uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud55c\ub2e4\uba74 \uc5b4\ub5a4 \uc791\uc5c5\uc774 \ud544\uc694\ud560\uae4c\uc694?\n\ucd94\ub860 \uc5d4\uc9c4\uc744 \ubc30\ud3ec\ud558\uae30 \uc704\ud55c Deployment, \ucd94\ub860 \uc694\uccad\uc744 \ubcf4\ub0bc Endpoint\ub97c \uc0dd\uc131\ud558\uae30 \uc704\ud55c Service,\n\uc678\ubd80\uc5d0\uc11c\uc758 \ucd94\ub860 \uc694\uccad\uc744 \ucd94\ub860 \uc5d4\uc9c4\uc73c\ub85c \ubcf4\ub0b4\uae30 \uc704\ud55c Ingress \ub4f1 \ub9ce\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ubc30\ud3ec\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774\uac83 \uc774\uc678\uc5d0\ub3c4, \ub9ce\uc740 \ucd94\ub860 \uc694\uccad\uc774 \ub4e4\uc5b4\uc654\uc744 \uacbd\uc6b0\uc758 \uc2a4\ucf00\uc77c \uc544\uc6c3(scale-out), \ucd94\ub860 \uc5d4\uc9c4 \uc0c1\ud0dc\uc5d0 \ub300\ud55c \ubaa8\ub2c8\ud130\ub9c1, \uac1c\uc120\ub41c \ubaa8\ub378\uc774 \ub098\uc654\uc744 \uacbd\uc6b0 \ubc84\uc804 \uc5c5\ub370\uc774\ud2b8 \ub4f1 \ucd94\ub860 \uc5d4\uc9c4\uc744 \uc6b4\uc601\ud560 \ub54c\uc758 \uc694\uad6c\uc0ac\ud56d\uc740 \ud55c\ub450 \uac00\uc9c0\uac00 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ub7ec\ud55c \ub9ce\uc740 \uc694\uad6c\uc0ac\ud56d\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud574 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd \uc704\uc5d0\uc11c \ud55c \ubc88 \ub354 \ucd94\uc0c1\ud654\ud55c ",(0,o.kt)("strong",{parentName:"p"},"Serving Framework"),"\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uac1c\ubc1c\ub41c Serving Framework\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Seldon Core\ub97c \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud558\ub294 \uacfc\uc815\uc744 \ub2e4\ub8e8\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7345],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>y});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=r.createContext({}),m=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof 
e?e(t):l(l({},t),e)),n},s=function(e){var t=m(e.components);return r.createElement(i.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,i=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),u=m(n),d=o,y=u["".concat(i,".").concat(d)]||u[d]||c[d]||a;return n?r.createElement(y,l(l({ref:t},s),{},{components:n})):r.createElement(y,l({ref:t},s))}));function y(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,l=new Array(a);l[0]=d;var p={};for(var i in t)hasOwnProperty.call(t,i)&&(p[i]=t[i]);p.originalType=e,p[u]="string"==typeof e?e:o,l[1]=p;for(var m=2;m{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>m});var r=n(7462),o=(n(7294),n(3905));const a={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,p={unversionedId:"api-deployment/what-is-api-deployment",id:"api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/docs/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/docs/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/what-is-api-deployment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/docs/kubeflow/how-to-debug"},next:{title:"2. 
Deploy SeldonDeployment",permalink:"/docs/api-deployment/seldon-iris"}},i={},m=[{value:"API Deployment\ub780?",id:"api-deployment\ub780",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],s={toc:m},u="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"api-deployment\ub780"},"API Deployment\ub780?"),(0,o.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc744 \ud559\uc2b5\ud55c \ub4a4\uc5d0\ub294 \uc5b4\ub5bb\uac8c \uc0ac\uc6a9\ud574\uc57c \ud560\uae4c\uc694?",(0,o.kt)("br",{parentName:"p"}),"\n","\uba38\uc2e0\ub7ec\ub2dd\uc744 \ud559\uc2b5\ud560 \ub54c\ub294 \ub354 \ub192\uc740 \uc131\ub2a5\uc758 \ubaa8\ub378\uc774 \ub098\uc624\uae30\ub97c \uae30\ub300\ud558\uc9c0\ub9cc, \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud558\uc5ec \ucd94\ub860\uc744 \ud560 \ub54c\ub294 \ube60\ub974\uace0 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc544\ubcf4\uace0 \uc2f6\uc744 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c \ud655\uc778\ud558\uace0\uc790 \ud560 \ub54c \uc8fc\ud53c\ud130 \ub178\ud2b8\ubd81\uc774\ub098 \ud30c\uc774\uc36c \uc2a4\ud06c\ub9bd\ud2b8\ub97c \ud1b5\ud574 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ub85c\ub4dc\ud55c \ub4a4 \ucd94\ub860\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub807\uc9c0\ub9cc \uc774\ub7f0 \ubc29\ubc95\uc740 \ubaa8\ub378\uc774 \ud074\uc218\ub85d \ubaa8\ub378\uc744 \ubd88\ub7ec\uc624\ub294 \ub370 \ub9ce\uc740 \uc2dc\uac04\uc744 \uc18c\uc694\ud558\uac8c \ub418\uc5b4\uc11c \ube44\ud6a8\uc728\uc801\uc785\ub2c8\ub2e4. \ub610\ud55c \uc774\ub807\uac8c \uc774\uc6a9\ud558\uba74 \ub9ce\uc740 \uc0ac\ub78c\uc774 \ubaa8\ub378\uc744 \uc774\uc6a9\ud560 \uc218 \uc5c6\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc774 \uc788\ub294 \ud658\uacbd\uc5d0\uc11c\ubc16\uc5d0 \uc0ac\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uadf8\ub798\uc11c \uc2e4\uc81c \uc11c\ube44\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd\uc774 \uc0ac\uc6a9\ub420 \ub54c\ub294 API\ub97c \uc774\uc6a9\ud574\uc11c \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. 
\ubaa8\ub378\uc740 API \uc11c\ubc84\uac00 \uad6c\ub3d9\ub418\ub294 \ud658\uacbd\uc5d0\uc11c \ud55c \ubc88\ub9cc \ub85c\ub4dc\uac00 \ub418\uba70, DNS\ub97c \ud65c\uc6a9\ud558\uc5ec \uc678\ubd80\uc5d0\uc11c\ub3c4 \uc27d\uac8c \ucd94\ub860 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uace0 \ub2e4\ub978 \uc11c\ube44\uc2a4\uc640 \uc5f0\ub3d9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ud558\uc9c0\ub9cc \ubaa8\ub378\uc744 API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc5d0\ub294 \uc0dd\uac01\ubcf4\ub2e4 \ub9ce\uc740 \ubd80\uc218\uc801\uc778 \uc791\uc5c5\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uadf8\ub798\uc11c API\ub85c \ub9cc\ub4dc\ub294 \uc791\uc5c5\uc744 \ub354 \uc27d\uac8c \ud558\uae30 \uc704\ud574\uc11c Tensorflow\uc640 \uac19\uc740 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c \uc9c4\uc601\uc5d0\uc11c\ub294 \ucd94\ub860 \uc5d4\uc9c4(Inference engine)\uc744 \uac1c\ubc1c\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc774\uc6a9\ud558\uba74 \ud574\ub2f9 \uba38\uc2e0\ub7ec\ub2dd \ud504\ub808\uc784\uc6cc\ud06c\ub85c \uac1c\ubc1c\ub418\uace0 \ud559\uc2b5\ub41c \ubaa8\ub378\uc744 \ubd88\ub7ec\uc640 \ucd94\ub860\uc774 \uac00\ub2a5\ud55c API(REST \ub610\ub294 gRPC)\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4.",(0,o.kt)("br",{parentName:"p"}),"\n","\uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\uc744 \ud65c\uc6a9\ud558\uc5ec \uad6c\ucd95\ud55c API \uc11c\ubc84\ub85c \ucd94\ub860\ud558\uace0\uc790 \ud558\ub294 \ub370\uc774\ud130\ub97c \ub2f4\uc544 \uc694\uccad\uc744 \ubcf4\ub0b4\uba74, \ucd94\ub860 \uc5d4\uc9c4\uc774 \ucd94\ub860 \uacb0\uacfc\ub97c \uc751\ub2f5\uc5d0 \ub2f4\uc544 \uc804\uc1a1\ud558\ub294 \uac83\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"\ub300\ud45c\uc801\uc73c\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow : Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch : Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"Onnx : Onnx Runtime"))),(0,o.kt)("p",null,"\uc624\ud504\uc18c\uc2a4\uc5d0\uc11c \uacf5\uc2dd\uc801\uc73c\ub85c \uc9c0\uc6d0\ud558\uc9c0\ub294 \uc54a\uc9c0\ub9cc, \ub9ce\uc774 \uc4f0\uc774\ub294 sklearn, xgboost \ud504\ub808\uc784\uc6cc\ud06c\ub97c \uc704\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub3c4 \uac1c\ubc1c\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ucc98\ub7fc \ubaa8\ub378\uc758 \ucd94\ub860 \uacb0\uacfc\ub97c API\uc758 \ud615\ud0dc\ub85c \ubc1b\uc544\ubcfc \uc218 \uc788\ub3c4\ub85d \ubc30\ud3ec\ud558\ub294 \uac83\uc744 ",(0,o.kt)("strong",{parentName:"p"},"API Deployment"),"\ub77c\uace0 \ud569\ub2c8\ub2e4."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"\uc704\uc5d0\uc11c \ub2e4\uc591\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\ub2e4\ub294 \uc0ac\uc2e4\uc744 \uc18c\uac1c\ud574 \ub4dc\ub838\uc2b5\ub2c8\ub2e4.\n\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd\uc5d0\uc11c \uc774\ub7ec\ud55c \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud55c\ub2e4\uba74 \uc5b4\ub5a4 \uc791\uc5c5\uc774 \ud544\uc694\ud560\uae4c\uc694?\n\ucd94\ub860 \uc5d4\uc9c4\uc744 \ubc30\ud3ec\ud558\uae30 \uc704\ud55c 
Deployment, \ucd94\ub860 \uc694\uccad\uc744 \ubcf4\ub0bc Endpoint\ub97c \uc0dd\uc131\ud558\uae30 \uc704\ud55c Service,\n\uc678\ubd80\uc5d0\uc11c\uc758 \ucd94\ub860 \uc694\uccad\uc744 \ucd94\ub860 \uc5d4\uc9c4\uc73c\ub85c \ubcf4\ub0b4\uae30 \uc704\ud55c Ingress \ub4f1 \ub9ce\uc740 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ub9ac\uc18c\uc2a4\ub97c \ubc30\ud3ec\ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\uc774\uac83 \uc774\uc678\uc5d0\ub3c4, \ub9ce\uc740 \ucd94\ub860 \uc694\uccad\uc774 \ub4e4\uc5b4\uc654\uc744 \uacbd\uc6b0\uc758 \uc2a4\ucf00\uc77c \uc544\uc6c3(scale-out), \ucd94\ub860 \uc5d4\uc9c4 \uc0c1\ud0dc\uc5d0 \ub300\ud55c \ubaa8\ub2c8\ud130\ub9c1, \uac1c\uc120\ub41c \ubaa8\ub378\uc774 \ub098\uc654\uc744 \uacbd\uc6b0 \ubc84\uc804 \uc5c5\ub370\uc774\ud2b8 \ub4f1 \ucd94\ub860 \uc5d4\uc9c4\uc744 \uc6b4\uc601\ud560 \ub54c\uc758 \uc694\uad6c\uc0ac\ud56d\uc740 \ud55c\ub450 \uac00\uc9c0\uac00 \uc544\ub2d9\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc774\ub7ec\ud55c \ub9ce\uc740 \uc694\uad6c\uc0ac\ud56d\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud574 \ucd94\ub860 \uc5d4\uc9c4\ub4e4\uc744 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud658\uacbd \uc704\uc5d0\uc11c \ud55c \ubc88 \ub354 \ucd94\uc0c1\ud654\ud55c ",(0,o.kt)("strong",{parentName:"p"},"Serving Framework"),"\ub4e4\uc774 \uac1c\ubc1c\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uac1c\ubc1c\ub41c Serving Framework\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 Seldon Core\ub97c \uc0ac\uc6a9\ud558\uc5ec API Deployment\ub97c \ud558\ub294 \uacfc\uc815\uc744 \ub2e4\ub8e8\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e070c0d8.090c99cc.js b/assets/js/e070c0d8.1a134767.js similarity index 97% rename from assets/js/e070c0d8.090c99cc.js rename to assets/js/e070c0d8.1a134767.js index 8f29c9d1..a9095fef 100644 --- a/assets/js/e070c0d8.090c99cc.js +++ b/assets/js/e070c0d8.1a134767.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5127],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var 
n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"kubeflow/basic-requirements",title:"3. Install Requirements",description:"",source:"@site/docs/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/docs/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-requirements.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/docs/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/docs/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc704\ud574 \uad8c\uc7a5\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc740 python>=3.7\uc785\ub2c8\ub2e4. \ud30c\uc774\uc36c \ud658\uacbd\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\uc740 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. 
\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ucc38\uace0\ud558\uc5ec ",(0,o.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0 \uc124\uce58\ud574\uc8fc\uc2e0 \ub4a4 \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\uae30\uc5d0\uc11c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uacfc \ubc84\uc804\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"\uc55e\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"\ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5127],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"kubeflow/basic-requirements",title:"3. 
Install Requirements",description:"",source:"@site/docs/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/docs/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-requirements.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/docs/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/docs/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc704\ud574 \uad8c\uc7a5\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc740 python>=3.7\uc785\ub2c8\ub2e4. \ud30c\uc774\uc36c \ud658\uacbd\uc5d0 \uc775\uc219\ud558\uc9c0 \uc54a\uc740 \ubd84\ub4e4\uc740 \ub2e4\uc74c ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ucc38\uace0\ud558\uc5ec ",(0,o.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc"),"\uc5d0 \uc124\uce58\ud574\uc8fc\uc2e0 \ub4a4 \ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud574\uc8fc\uc2dc\uae30\ub97c \ubc14\ub78d\ub2c8\ub2e4."),(0,o.kt)("p",null,"\uc2e4\uc2b5\uc744 \uc9c4\ud589\ud558\uae30\uc5d0\uc11c \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\ub4e4\uacfc \ubc84\uc804\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,(0,o.kt)("a",{parentName:"p",href:"/docs/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"\uc55e\uc5d0\uc11c \ub9cc\ub4e0 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),"\uc744 \ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"\ud328\ud0a4\uc9c0 \uc124\uce58\ub97c \uc9c4\ud589\ud569\ub2c8\ub2e4."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e287e96b.1c29974a.js b/assets/js/e287e96b.92c61555.js similarity index 99% rename from assets/js/e287e96b.1c29974a.js rename to assets/js/e287e96b.92c61555.js index aa248686..07baa3c8 100644 --- a/assets/js/e287e96b.1c29974a.js +++ b/assets/js/e287e96b.92c61555.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8657],{3905:(n,e,r)=>{r.d(e,{Zo:()=>o,kt:()=>b});var t=r(7294);function p(n,e,r){return e in n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function u(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function a(n){for(var e=1;e=0||(p[r]=n[r]);return 
p}(n,e);if(Object.getOwnPropertySymbols){var u=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):a(a({},e),n)),r},o=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},_="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,u=n.originalType,s=n.parentName,o=i(n,["components","mdxType","originalType","parentName"]),_=m(r),d=p,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||u;return r?t.createElement(b,a(a({ref:e},o),{},{components:r})):t.createElement(b,a({ref:e},o))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var u=r.length,a=new Array(u);a[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[_]="string"==typeof n?n:p,a[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>a,default:()=>l,frontMatter:()=>u,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const u={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},a=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"version-1.0/kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/docs/1.0/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/docs/1.0/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload"}},s={},m=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],o={toc:m},_="wrapper";function l(n){let{components:e,...u}=n;return(0,p.kt)(_,(0,t.Z)({},o,u,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline"},"Pipeline"),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub418\uc9c0 \uc54a\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uad6c\uc131\uc694\uc18c\ub85c\uc368 \uc2e4\ud589\ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ubbc0\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud574 \ubcf4\ub824\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc21c\uc11c\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc22b\uc790\ub97c \uc785\ub825\ubc1b\uace0 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ub450 \uac1c\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc22b\uc790\ub97c \ubc1b\uc544\uc11c \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc788\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub9cc\ub4e4\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"component-set"},"Component Set"),(0,p.kt)("p",null,"\uc6b0\uc120 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,p.kt)("ol",null,(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc785\ub825\ubc1b\uc740 \uac12\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \ub450 \uac1c\uc758 \uc22b\uc790\uc758 \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774 \ucef4\ud3ec\ub10c\ud2b8 \uc5ed\uc2dc \ub450 \uc22b\uc790\uc758 \ud569\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,p.kt)("h2",{id:"component-order"},"Component Order"),(0,p.kt)("h3",{id:"define-order"},"Define Order"),(0,p.kt)("p",null,"\ud544\uc694\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uc744 \ub9cc\ub4e4\uc5c8\uc73c\uba74, \ub2e4\uc74c\uc73c\ub85c\ub294 \uc774\ub4e4\uc758 \uc21c\uc11c\ub97c \uc815\uc758\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \ub9cc\ub4e4 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc21c\uc11c\ub97c \uadf8\ub9bc\uc73c\ub85c \ud45c\ud604\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"pipeline-0.png",src:r(5740).Z,width:"586",height:"262"})),(0,p.kt)("h3",{id:"single-output"},"Single Output"),(0,p.kt)("p",null,"\uc774\uc81c \uc774 \uc21c\uc11c\ub97c \ucf54\ub4dc\ub85c 
\uc62e\uaca8\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. "),(0,p.kt)("p",null,"\uc6b0\uc120 \uc704\uc758 \uadf8\ub9bc\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," \uacfc ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," \ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uace0 \uadf8 \ubc18\ud658 \uac12\uc744 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_2_result")," \uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc800\uc7a5\ub41c ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc758 \ubc18\ud658 \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output")," \ub97c \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"multi-output"},"Multi Output"),(0,p.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e8\uc77c \uac12\ub9cc\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"output"),"\uc744 \uc774\uc6a9\ud574 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \uc5ec\ub7ec \uac1c\uc758 \ubc18\ud658 \uac12\uc774 \uc788\ub2e4\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"outputs"),"\uc5d0 \uc800\uc7a5\uc774 \ub418\uba70 dict \ud0c0\uc785\uc774\uae30\uc5d0 key\ub97c \uc774\uc6a9\ud574 \uc6d0\ud558\ub294 \ubc18\ud658 \uac12\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \uc55e\uc5d0\uc11c \uc791\uc131\ud55c \uc5ec\ub7ec \uac1c\ub97c \ubc18\ud658\ud558\ub294 ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-component#define-a-standalone-python-function"},"\ucef4\ud3ec\ub10c\ud2b8")," \uc758 \uacbd\uc6b0\ub97c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n",(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number")," \uc758 return \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"quotient")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"remainder")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub450 \uac12\uc744 ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")," \uc5d0 \uc804\ub2ec\ud558\ub294 \uc608\uc2dc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,p.kt)("p",null,(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number"),"\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"divided_result"),"\uc5d0 \uc800\uc7a5\ud558\uace0 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]'),", ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"\ub85c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,p.kt)("p",null,"\uc774\uc81c \ub2e4\uc2dc \ubcf8\ub860\uc73c\ub85c \ub3cc\uc544\uc640\uc11c \uc774 \ub450 \uac12\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")," \uc5d0 \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \uac01 \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uc744 \ubaa8\uc544\uc11c \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc815\uc758 \ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,p.kt)("p",null,"\ub9c8\uc9c0\ub9c9\uc73c\ub85c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,p.kt)("p",null,"Kubeflow\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 yaml \ud615\uc2dd\uc73c\ub85c\ub9cc \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc5d0 \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc815\ud574\uc9c4 yaml \ud615\uc2dd\uc73c\ub85c \ucef4\ud30c\uc77c(Compile) \ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ucef4\ud30c\uc77c\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("h2",{id:"conclusion"},"Conclusion"),(0,p.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \ubaa8\uc73c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ucef4\ud30c\uc77c\ub41c \uacb0\uacfc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - 
{name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n 
_serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = 
print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}l.isMDXComponent=!0},5740:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8657],{3905:(n,e,r)=>{r.d(e,{Zo:()=>o,kt:()=>b});var t=r(7294);function p(n,e,r){return e in 
n?Object.defineProperty(n,e,{value:r,enumerable:!0,configurable:!0,writable:!0}):n[e]=r,n}function u(n,e){var r=Object.keys(n);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(n);e&&(t=t.filter((function(e){return Object.getOwnPropertyDescriptor(n,e).enumerable}))),r.push.apply(r,t)}return r}function a(n){for(var e=1;e=0||(p[r]=n[r]);return p}(n,e);if(Object.getOwnPropertySymbols){var u=Object.getOwnPropertySymbols(n);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(n,r)&&(p[r]=n[r])}return p}var s=t.createContext({}),m=function(n){var e=t.useContext(s),r=e;return n&&(r="function"==typeof n?n(e):a(a({},e),n)),r},o=function(n){var e=m(n.components);return t.createElement(s.Provider,{value:e},n.children)},_="mdxType",l={inlineCode:"code",wrapper:function(n){var e=n.children;return t.createElement(t.Fragment,{},e)}},d=t.forwardRef((function(n,e){var r=n.components,p=n.mdxType,u=n.originalType,s=n.parentName,o=i(n,["components","mdxType","originalType","parentName"]),_=m(r),d=p,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||u;return r?t.createElement(b,a(a({ref:e},o),{},{components:r})):t.createElement(b,a({ref:e},o))}));function b(n,e){var r=arguments,p=e&&e.mdxType;if("string"==typeof n||p){var u=r.length,a=new Array(u);a[0]=d;var i={};for(var s in e)hasOwnProperty.call(e,s)&&(i[s]=e[s]);i.originalType=n,i[_]="string"==typeof n?n:p,a[1]=i;for(var m=2;m{r.r(e),r.d(e,{assets:()=>s,contentTitle:()=>a,default:()=>l,frontMatter:()=>u,metadata:()=>i,toc:()=>m});var t=r(7462),p=(r(7294),r(3905));const u={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},a=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"version-1.0/kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/docs/1.0/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/docs/1.0/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/docs/1.0/kubeflow/basic-pipeline-upload"}},s={},m=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],o={toc:m},_="wrapper";function l(n){let{components:e,...u}=n;return(0,p.kt)(_,(0,t.Z)({},o,u,{components:e,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"pipeline"},"Pipeline"),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub294 \ub3c5\ub9bd\uc801\uc73c\ub85c \uc2e4\ud589\ub418\uc9c0 \uc54a\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uad6c\uc131\uc694\uc18c\ub85c\uc368 \uc2e4\ud589\ub429\ub2c8\ub2e4. 
\uadf8\ub7ec\ubbc0\ub85c \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud574 \ubcf4\ub824\uba74 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4.\n\uadf8\ub9ac\uace0 \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc791\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uacfc \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc21c\uc11c\uac00 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc22b\uc790\ub97c \uc785\ub825\ubc1b\uace0 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc640 \ub450 \uac1c\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c\ubd80\ud130 \uc22b\uc790\ub97c \ubc1b\uc544\uc11c \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uac00 \uc788\ub294 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ub9cc\ub4e4\uc5b4 \ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h2",{id:"component-set"},"Component Set"),(0,p.kt)("p",null,"\uc6b0\uc120 \ud30c\uc774\ud504\ub77c\uc778\uc5d0\uc11c \uc0ac\uc6a9\ud560 \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,p.kt)("ol",null,(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \uc22b\uc790\ub97c \ucd9c\ub825\ud558\uace0 \ubc18\ud658\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ucef4\ud3ec\ub10c\ud2b8\uac00 \uc785\ub825\ubc1b\uc740 \uac12\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,p.kt)("li",{parentName:"ol"},(0,p.kt)("p",{parentName:"li"},(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,p.kt)("p",{parentName:"li"},"\uc785\ub825\ubc1b\uc740 \ub450 \uac1c\uc758 \uc22b\uc790\uc758 \ud569\uc744 \ucd9c\ub825\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc785\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774 \ucef4\ud3ec\ub10c\ud2b8 \uc5ed\uc2dc \ub450 \uc22b\uc790\uc758 \ud569\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 int\ub97c return\uc758 \ud0c0\uc785 \ud78c\ud2b8\ub85c \uc785\ub825\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",{parentName:"li"},(0,p.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,p.kt)("h2",{id:"component-order"},"Component Order"),(0,p.kt)("h3",{id:"define-order"},"Define Order"),(0,p.kt)("p",null,"\ud544\uc694\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc9d1\ud569\uc744 \ub9cc\ub4e4\uc5c8\uc73c\uba74, \ub2e4\uc74c\uc73c\ub85c\ub294 \uc774\ub4e4\uc758 \uc21c\uc11c\ub97c \uc815\uc758\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c \ub9cc\ub4e4 \ud30c\uc774\ud504\ub77c\uc778\uc758 \uc21c\uc11c\ub97c \uadf8\ub9bc\uc73c\ub85c \ud45c\ud604\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"pipeline-0.png",src:r(5740).Z,width:"586",height:"262"})),(0,p.kt)("h3",{id:"single-output"},"Single Output"),(0,p.kt)("p",null,"\uc774\uc81c \uc774 \uc21c\uc11c\ub97c \ucf54\ub4dc\ub85c 
\uc62e\uaca8\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. "),(0,p.kt)("p",null,"\uc6b0\uc120 \uc704\uc758 \uadf8\ub9bc\uc5d0\uc11c ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," \uacfc ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," \ub97c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,p.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc2e4\ud589\ud558\uace0 \uadf8 \ubc18\ud658 \uac12\uc744 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_2_result")," \uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\uc800\uc7a5\ub41c ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_result")," \uc758 \ubc18\ud658 \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output")," \ub97c \ud1b5\ud574 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"multi-output"},"Multi Output"),(0,p.kt)("p",null,"\uc704\uc758 \uc608\uc2dc\uc5d0\uc11c \ucef4\ud3ec\ub10c\ud2b8\ub294 \ub2e8\uc77c \uac12\ub9cc\uc744 \ubc18\ud658\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,p.kt)("inlineCode",{parentName:"p"},"output"),"\uc744 \uc774\uc6a9\ud574 \ubc14\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,p.kt)("br",{parentName:"p"}),"\n","\ub9cc\uc57d, \uc5ec\ub7ec \uac1c\uc758 \ubc18\ud658 \uac12\uc774 \uc788\ub2e4\uba74 ",(0,p.kt)("inlineCode",{parentName:"p"},"outputs"),"\uc5d0 \uc800\uc7a5\uc774 \ub418\uba70 dict \ud0c0\uc785\uc774\uae30\uc5d0 key\ub97c \uc774\uc6a9\ud574 \uc6d0\ud558\ub294 \ubc18\ud658 \uac12\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \uc55e\uc5d0\uc11c \uc791\uc131\ud55c \uc5ec\ub7ec \uac1c\ub97c \ubc18\ud658\ud558\ub294 ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-component#define-a-standalone-python-function"},"\ucef4\ud3ec\ub10c\ud2b8")," \uc758 \uacbd\uc6b0\ub97c \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n",(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number")," \uc758 return \uac12\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"quotient")," \uc640 ",(0,p.kt)("inlineCode",{parentName:"p"},"remainder")," \uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774 \ub450 \uac12\uc744 ",(0,p.kt)("inlineCode",{parentName:"p"},"print_and_return_number")," \uc5d0 \uc804\ub2ec\ud558\ub294 \uc608\uc2dc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,p.kt)("p",null,(0,p.kt)("inlineCode",{parentName:"p"},"divde_and_return_number"),"\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"divided_result"),"\uc5d0 \uc800\uc7a5\ud558\uace0 \uac01\uac01 ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]'),", ",(0,p.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"\ub85c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,p.kt)("p",null,"\uc774\uc81c \ub2e4\uc2dc \ubcf8\ub860\uc73c\ub85c \ub3cc\uc544\uc640\uc11c \uc774 \ub450 \uac12\uc758 \uacb0\uacfc\ub97c ",(0,p.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")," \uc5d0 \uc804\ub2ec\ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \uac01 \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ud544\uc694\ud55c Config\ub4e4\uc744 \ubaa8\uc544\uc11c \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc815\uc758 \ud569\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,p.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,p.kt)("p",null,"\ub9c8\uc9c0\ub9c9\uc73c\ub85c kubeflow\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ud615\uc2dd\uc73c\ub85c \ubcc0\ud658\ud569\ub2c8\ub2e4. 
\ubcc0\ud658\uc740 ",(0,p.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," \ud568\uc218\ub97c \uc774\uc6a9\ud574 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,p.kt)("p",null,"Kubeflow\uc5d0\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc2e4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 yaml \ud615\uc2dd\uc73c\ub85c\ub9cc \uac00\ub2a5\ud558\uae30 \ub54c\ubb38\uc5d0 \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc744 \uc815\ud574\uc9c4 yaml \ud615\uc2dd\uc73c\ub85c \ucef4\ud30c\uc77c(Compile) \ud574 \uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ucef4\ud30c\uc77c\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("h2",{id:"conclusion"},"Conclusion"),(0,p.kt)("p",null,"\uc55e\uc11c \uc124\uba85\ud55c \ub0b4\uc6a9\uc744 \ud55c \ud30c\uc774\uc36c \ucf54\ub4dc\ub85c \ubaa8\uc73c\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\ucef4\ud30c\uc77c\ub41c \uacb0\uacfc\ub97c \ubcf4\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("details",null,(0,p.kt)("summary",null,"example_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - 
{name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n 
_serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = 
print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}l.isMDXComponent=!0},5740:(n,e,r)=>{r.d(e,{Z:()=>t});const t=r.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file diff --git a/assets/js/e2bc44c1.d97e1202.js b/assets/js/e2bc44c1.fb60d7dd.js similarity index 99% rename from assets/js/e2bc44c1.d97e1202.js rename to assets/js/e2bc44c1.fb60d7dd.js index 2e180c38..167850f1 100644 --- 
a/assets/js/e2bc44c1.d97e1202.js +++ b/assets/js/e2bc44c1.fb60d7dd.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5040],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function u(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):u(u({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,b=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return n?r.createElement(b,u(u({ref:t},p),{},{components:n})):r.createElement(b,u({ref:t},p))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,u=new Array(l);u[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,u[1]=s;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>u,default:()=>k,frontMatter:()=>l,metadata:()=>s,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},u=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. 
\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc704\ud55c \ub124\ud2b8\uc6cc\ud06c\uc758 \uc124\uc815\uc744 \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function u(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):u(u({},t),e)),n},p=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=o(n),d=a,b=c["".concat(i,".").concat(d)]||c[d]||k[d]||l;return n?r.createElement(b,u(u({ref:t},p),{},{components:n})):r.createElement(b,u({ref:t},p))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,u=new Array(l);u[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,u[1]=s;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>u,default:()=>k,frontMatter:()=>l,metadata:()=>s,toc:()=>o});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. 
Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},u=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2},{value:"6. References",id:"6-references",level:2}],p={toc:o},c="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,a.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4\ub97c \uc704\ud55c \ub124\ud2b8\uc6cc\ud06c\uc758 \uc124\uc815\uc744 \ubcc0\uacbd\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{r.d(t,{Zo:()=>u,kt:()=>d});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function l(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var p=o.createContext({}),c=function(e){var t=o.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},u=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,i=e.originalType,p=e.parentName,u=a(e,["components","mdxType","originalType","parentName"]),s=c(r),b=n,d=s["".concat(p,".").concat(b)]||s[b]||f[b]||i;return r?o.createElement(d,l(l({ref:t},u),{},{components:r})):o.createElement(d,l({ref:t},u))}));function d(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var i=r.length,l=new Array(i);l[0]=b;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[s]="string"==typeof e?e:n,l[1]=a;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>f,frontMatter:()=>i,metadata:()=>a,toc:()=>c});var o=r(7462),n=(r(7294),r(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},l=void 0,a={unversionedId:"kubeflow/kubeflow-intro",id:"version-1.0/kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/docs/1.0/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. 
Kubeflow Pipeline \uad00\ub828",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts"}},p={},c=[],u={toc:c},s="wrapper";function f(e){let{components:t,...r}=e;return(0,n.kt)(s,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Kubeflow\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8(Component)\uc640 \ud30c\uc774\ud504\ub77c\uc778(Pipeline)\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline \uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uacfc\ub294 \ub2e4\uc18c \ucc28\uc774\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc5ec\uae30\uc5d0\uc11c\ub294 Kubeflow Pipeline\uc744 \uc6cc\ud06c\ud50c\ub85c(Workflow)\uac00 \uc544\ub2cc \uc55e\uc11c \uc124\uba85\ud55c ",(0,n.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"MLOps\ub97c \uad6c\uc131\ud558\ub294 \uc694\uc18c")," \uc911 \ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubb34\uc5c7\uc774\uba70 \uc5b4\ub5bb\uac8c \uc791\uc131\ud560 \uc218 \uc788\ub294\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}f.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5271],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>d});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function i(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function l(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var p=o.createContext({}),c=function(e){var t=o.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},u=function(e){var t=c(e.components);return o.createElement(p.Provider,{value:t},e.children)},s="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,i=e.originalType,p=e.parentName,u=a(e,["components","mdxType","originalType","parentName"]),s=c(r),b=n,d=s["".concat(p,".").concat(b)]||s[b]||f[b]||i;return r?o.createElement(d,l(l({ref:t},u),{},{components:r})):o.createElement(d,l({ref:t},u))}));function d(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var i=r.length,l=new Array(i);l[0]=b;var a={};for(var p in t)hasOwnProperty.call(t,p)&&(a[p]=t[p]);a.originalType=e,a[s]="string"==typeof e?e:n,l[1]=a;for(var c=2;c{r.r(t),r.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>f,frontMatter:()=>i,metadata:()=>a,toc:()=>c});var o=r(7462),n=(r(7294),r(3905));const i={title:"1. 
Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},l=void 0,a={unversionedId:"kubeflow/kubeflow-intro",id:"version-1.0/kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/docs/1.0/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline \uad00\ub828",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/docs/1.0/kubeflow/kubeflow-concepts"}},p={},c=[],u={toc:c},s="wrapper";function f(e){let{components:t,...r}=e;return(0,n.kt)(s,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Kubeflow\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8(Component)\uc640 \ud30c\uc774\ud504\ub77c\uc778(Pipeline)\uc744 \uc791\uc131\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,n.kt)("p",null,(0,n.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uc740 ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline \uacf5\uc2dd \ud648\ud398\uc774\uc9c0"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \ubc29\uc2dd\uacfc\ub294 \ub2e4\uc18c \ucc28\uc774\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc5ec\uae30\uc5d0\uc11c\ub294 Kubeflow Pipeline\uc744 \uc6cc\ud06c\ud50c\ub85c(Workflow)\uac00 \uc544\ub2cc \uc55e\uc11c \uc124\uba85\ud55c ",(0,n.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"MLOps\ub97c \uad6c\uc131\ud558\ub294 \uc694\uc18c")," \uc911 \ud558\ub098\uc758 \ucef4\ud3ec\ub10c\ud2b8\ub85c \uc0ac\uc6a9\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,n.kt)("p",null,"\uadf8\ub7fc \uc774\uc81c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ubb34\uc5c7\uc774\uba70 \uc5b4\ub5bb\uac8c \uc791\uc131\ud560 \uc218 \uc788\ub294\uc9c0 \uc54c\uc544\ubcf4\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/e46e340c.92f69db7.js b/assets/js/e46e340c.93dc62fc.js similarity index 99% rename from assets/js/e46e340c.92f69db7.js rename to assets/js/e46e340c.93dc62fc.js index 175e3f25..201463a5 100644 --- a/assets/js/e46e340c.92f69db7.js +++ b/assets/js/e46e340c.93dc62fc.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[204],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var o=t.createContext({}),s=function(e){var n=t.useContext(o),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(o.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,o=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(o,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var p={};for(var o in n)hasOwnProperty.call(n,o)&&(p[o]=n[o]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-mlflow",id:"version-1.0/kubeflow/advanced-mlflow",title:"12. 
Component - MLFlow",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/docs/1.0/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/docs/1.0/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/docs/1.0/kubeflow/how-to-debug"}},o={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. \ubaa8\ub378 \ud559\uc2b5",id:"1-\ubaa8\ub378-\ud559\uc2b5",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component"},"Advanced Usage Component")," \uc5d0\uc11c \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 API Deployment\uae4c\uc9c0 \uc774\uc5b4\uc9c0\uae30 \uc704\ud574\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"MLFlow\uc5d0\uc11c \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uace0 \uc11c\ube59\uc5d0\uc11c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uc758 \ud56d\ubaa9\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"\ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \ud1b5\ud574\uc11c MLFLow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-\ubaa8\ub378-\ud559\uc2b5"},"1. 
\ubaa8\ub378 \ud559\uc2b5"),(0,r.kt)("p",null,"\uc544\ub798 \uacfc\uc815\uc740 iris \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 SVC \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. MLFLow Infos"),(0,r.kt)("p",null,"mlflow\uc5d0 \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"\uac01 \ubcc0\uc218\uc758 \ub0b4\uc6a9\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. 
Save MLFLow Infos"),(0,r.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \ud559\uc2b5\ud55c \uc815\ubcf4\ub4e4\uacfc \ubaa8\ub378\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4.\n\ud559\uc2b5\ud55c \ubaa8\ub378\uc774 sklearn \ud328\ud0a4\uc9c0\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn")," \uc744 \uc774\uc6a9\ud558\uba74 \uc27d\uac8c \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uc791\uc5c5\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 svc \ud3f4\ub354\uac00 \uc0dd\uae30\uba70 \uc544\ub798\uc640 \uac19\uc740 \ud30c\uc77c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uc758 \ucd9c\ub825\uac12\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"\uac01 \ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"\uc774\uc81c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 mlflow \uc11c\ubc84\uc5d0 \uc62c\ub9ac\ub294 
\uc791\uc5c5\uc744 \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"\uc800\uc7a5\ud558\uace0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," \uac00 \uc0dd\uc131\ub41c \uacbd\ub85c\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 mlflow \uc11c\ubc84\uc640 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ub744\uc6c1\ub2c8\ub2e4.\nmlflow \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud558\uc5ec \uc0dd\uc131\ub41c run\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(3810).Z,width:"2782",height:"2496"}),"\n(\ud574\ub2f9 \ud654\uba74\uc740 mlflow \ubc84\uc804\uc5d0 \ub530\ub77c \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"\uc774\uc81c Kubeflow\uc5d0\uc11c \uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc740 \ud06c\uac8c 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \ud544\uc694\ud55c \ud658\uacbd\uc744 \uc800\uc7a5 \ud6c4 MLFlow \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc5c5\ub85c\ub4dc\ub9cc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(8705).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uacfc \ub370\uc774\ud130\ub97c MLFlow \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \uc804\ub2ec \ud6c4 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(9481).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc\ub97c \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(3268).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \uc774 \uc911 1\ubc88\uc758 \uc811\uadfc \ubc29\ubc95\uc744 \ud1b5\ud574 \ubaa8\ub378\uc744 \uad00\ub9ac\ud558\ub824\uace0 \ud569\ub2c8\ub2e4.\n\uc774\uc720\ub294 MLFlow \ubaa8\ub378\uc744 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucf54\ub4dc\ub294 \ubc14\ub00c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \ub9e4\ubc88 3\ubc88\ucc98\ub7fc \ucef4\ud3ec\ub10c\ud2b8 \uc791\uc131\ub9c8\ub2e4 \uc791\uc131\ud560 \ud544\uc694\ub294 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc7ac\ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc740 1\ubc88\uacfc 2\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c \uac00\ub2a5\ud569\ub2c8\ub2e4.\n\ub2e4\ub9cc 2\ubc88\uc758 \uacbd\uc6b0 \ubaa8\ub378\uc774 \ud559\uc2b5\ub41c \uc774\ubbf8\uc9c0\uc640 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc804\ub2ec\ud574\uc57c \ud558\ubbc0\ub85c \uacb0\uad6d \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \ucd94\uac00 \uc815\ubcf4\ub97c \uc804\ub2ec\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"1\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c 
\uc9c4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub610\ud55c \ubcc0\uacbd\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294\ub370 \ud544\uc694\ud55c \ud658\uacbd\ub4e4\uc744 \uc800\uc7a5\ud574\uc8fc\ub294 \ucf54\ub4dc\uac00 \ucd94\uac00\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 MLFlow\uc5d0 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4.\n\uc774 \ub54c \uc5c5\ub85c\ub4dc\ub418\ub294 MLflow\uc758 endpoint\ub97c \uc6b0\ub9ac\uac00 \uc124\uce58\ud55c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-mlflow"},"mlflow service")," \ub85c \uc774\uc5b4\uc9c0\uac8c \uc124\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c S3 Endpoint\uc758 \uc8fc\uc18c\ub294 MLflow Server \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c minio\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/services-networking/dns-pod-service/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9"),"\ud569\ub2c8\ub2e4. 
\ud574\ub2f9 service \ub294 kubeflow namespace\uc5d0\uc11c minio-service\ub77c\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\uc73c\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\uc640 \ube44\uc2b7\ud558\uac8c tracking_uri\uc758 \uc8fc\uc18c\ub294 mlflow server\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc5f0\uacb0\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \ub9cc\ub4e4\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"\ubaa8\ub378\uc744 \ud559\uc2b5\ud560 \ub54c \uc4f8 \ub370\uc774\ud130\ub294 sklearn\uc758 iris \uc785\ub2c8\ub2e4.\n\ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778 \ucf54\ub4dc\ub294 
\ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ud558\ub098\uc758 \ud30c\uc774\uc36c \ud30c\uc77c\uc5d0 \uc815\ub9ac\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n 
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n 
outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, 
/tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n 
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n 
"image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n 
labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": 
"{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"\uc2e4\ud589\ud6c4 \uc0dd\uc131\ub41c mlflow_pipeline.yaml \ud30c\uc77c\uc744 \ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ud55c \ud6c4, \uc2e4\ud589\ud558\uc5ec run \uc758 \uacb0\uacfc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(1822).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"mlflow service\ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud574\uc11c MLflow ui\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 localhost:5000\uc73c\ub85c \uc811\uc18d\ud558\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 run\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(339).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"run \uc744 \ud074\ub9ad\ud574\uc11c \ud655\uc778\ud558\uba74 \ud559\uc2b5\ud55c \ubaa8\ub378 \ud30c\uc77c\uc774 \uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(7463).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},3810:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},8705:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},9481:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},3268:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},1822:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},339:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},7463:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[204],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var o=t.createContext({}),s=function(e){var n=t.useContext(o),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(o.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,o=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(o,".").concat(u)]||m[u]||_[u]||l;return 
a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var p={};for(var o in n)hasOwnProperty.call(n,o)&&(p[o]=n[o]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var s=2;s{a.r(n),a.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-mlflow",id:"version-1.0/kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/docs/1.0/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/docs/1.0/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/docs/1.0/kubeflow/how-to-debug"}},o={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. \ubaa8\ub378 \ud559\uc2b5",id:"1-\ubaa8\ub378-\ud559\uc2b5",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. 
Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/advanced-component"},"Advanced Usage Component")," \uc5d0\uc11c \ud559\uc2b5\ud55c \ubaa8\ub378\uc774 API Deployment\uae4c\uc9c0 \uc774\uc5b4\uc9c0\uae30 \uc704\ud574\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 MLFlow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \uacfc\uc815\uc744 \uc124\uba85\ud569\ub2c8\ub2e4."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"MLFlow\uc5d0\uc11c \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\uace0 \uc11c\ube59\uc5d0\uc11c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c\ub294 \ub2e4\uc74c\uc758 \ud56d\ubaa9\ub4e4\uc774 \ud544\uc694\ud569\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"\ud30c\uc774\uc36c \ucf54\ub4dc\ub97c \ud1b5\ud574\uc11c MLFLow\uc5d0 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uacfc\uc815\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"1-\ubaa8\ub378-\ud559\uc2b5"},"1. \ubaa8\ub378 \ud559\uc2b5"),(0,r.kt)("p",null,"\uc544\ub798 \uacfc\uc815\uc740 iris \ub370\uc774\ud130\ub97c \uc774\uc6a9\ud574 SVC \ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. 
MLFLow Infos"),(0,r.kt)("p",null,"mlflow\uc5d0 \ud544\uc694\ud55c \uc815\ubcf4\ub4e4\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"\uac01 \ubcc0\uc218\uc758 \ub0b4\uc6a9\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. 
Save MLFLow Infos"),(0,r.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c \ud559\uc2b5\ud55c \uc815\ubcf4\ub4e4\uacfc \ubaa8\ub378\uc744 \uc800\uc7a5\ud569\ub2c8\ub2e4.\n\ud559\uc2b5\ud55c \ubaa8\ub378\uc774 sklearn \ud328\ud0a4\uc9c0\ub97c \uc774\uc6a9\ud558\uae30 \ub54c\ubb38\uc5d0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn")," \uc744 \uc774\uc6a9\ud558\uba74 \uc27d\uac8c \ubaa8\ub378\uc744 \uc800\uc7a5\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"\ub85c\uceec\uc5d0\uc11c \uc791\uc5c5\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 svc \ud3f4\ub354\uac00 \uc0dd\uae30\uba70 \uc544\ub798\uc640 \uac19\uc740 \ud30c\uc77c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"\uc704\uc758 \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud558\uba74 \ub2e4\uc74c\uc758 \ucd9c\ub825\uac12\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"\uac01 \ud30c\uc77c\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"\uc774\uc81c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 mlflow \uc11c\ubc84\uc5d0 \uc62c\ub9ac\ub294 
\uc791\uc5c5\uc744 \ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"\uc800\uc7a5\ud558\uace0 ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," \uac00 \uc0dd\uc131\ub41c \uacbd\ub85c\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," \uba85\ub839\uc5b4\ub97c \uc774\uc6a9\ud574 mlflow \uc11c\ubc84\uc640 \ub300\uc2dc\ubcf4\ub4dc\ub97c \ub744\uc6c1\ub2c8\ub2e4.\nmlflow \ub300\uc2dc\ubcf4\ub4dc\uc5d0 \uc811\uc18d\ud558\uc5ec \uc0dd\uc131\ub41c run\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ubcf4\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(3810).Z,width:"2782",height:"2496"}),"\n(\ud574\ub2f9 \ud654\uba74\uc740 mlflow \ubc84\uc804\uc5d0 \ub530\ub77c \ub2e4\ub97c \uc218 \uc788\uc2b5\ub2c8\ub2e4.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"\uc774\uc81c Kubeflow\uc5d0\uc11c \uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,"\uc7ac\uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud558\ub294 \ubc29\ubc95\uc740 \ud06c\uac8c 3\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \ud544\uc694\ud55c \ud658\uacbd\uc744 \uc800\uc7a5 \ud6c4 MLFlow \ucef4\ud3ec\ub10c\ud2b8\ub294 \uc5c5\ub85c\ub4dc\ub9cc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(8705).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud559\uc2b5\ub41c \ubaa8\ub378\uacfc \ub370\uc774\ud130\ub97c MLFlow \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \uc804\ub2ec \ud6c4 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(9481).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ubaa8\ub378\uc744 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc800\uc7a5\uacfc \uc5c5\ub85c\ub4dc\ub97c \ub2f4\ub2f9"),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(3268).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"\uc800\ud76c\ub294 \uc774 \uc911 1\ubc88\uc758 \uc811\uadfc \ubc29\ubc95\uc744 \ud1b5\ud574 \ubaa8\ub378\uc744 \uad00\ub9ac\ud558\ub824\uace0 \ud569\ub2c8\ub2e4.\n\uc774\uc720\ub294 MLFlow \ubaa8\ub378\uc744 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucf54\ub4dc\ub294 \ubc14\ub00c\uc9c0 \uc54a\uae30 \ub54c\ubb38\uc5d0 \ub9e4\ubc88 3\ubc88\ucc98\ub7fc \ucef4\ud3ec\ub10c\ud2b8 \uc791\uc131\ub9c8\ub2e4 \uc791\uc131\ud560 \ud544\uc694\ub294 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,r.kt)("p",null,"\ucef4\ud3ec\ub10c\ud2b8\ub97c \uc7ac\ud65c\uc6a9\ud558\ub294 \ubc29\ubc95\uc740 1\ubc88\uacfc 2\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c \uac00\ub2a5\ud569\ub2c8\ub2e4.\n\ub2e4\ub9cc 2\ubc88\uc758 \uacbd\uc6b0 \ubaa8\ub378\uc774 \ud559\uc2b5\ub41c \uc774\ubbf8\uc9c0\uc640 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc804\ub2ec\ud574\uc57c \ud558\ubbc0\ub85c \uacb0\uad6d \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \ucd94\uac00 \uc815\ubcf4\ub97c \uc804\ub2ec\ud574\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,"1\ubc88\uc758 \ubc29\ubc95\uc73c\ub85c 
\uc9c4\ud589\ud558\uae30 \uc704\ud574\uc11c\ub294 \ud559\uc2b5\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8 \ub610\ud55c \ubcc0\uacbd\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294\ub370 \ud544\uc694\ud55c \ud658\uacbd\ub4e4\uc744 \uc800\uc7a5\ud574\uc8fc\ub294 \ucf54\ub4dc\uac00 \ucd94\uac00\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"\uadf8\ub9ac\uace0 MLFlow\uc5d0 \uc5c5\ub85c\ub4dc\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4.\n\uc774 \ub54c \uc5c5\ub85c\ub4dc\ub418\ub294 MLflow\uc758 endpoint\ub97c \uc6b0\ub9ac\uac00 \uc124\uce58\ud55c ",(0,r.kt)("a",{parentName:"p",href:"/docs/1.0/setup-components/install-components-mlflow"},"mlflow service")," \ub85c \uc774\uc5b4\uc9c0\uac8c \uc124\uc815\ud574\uc8fc\uc5b4\uc57c \ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774 \ub54c S3 Endpoint\uc758 \uc8fc\uc18c\ub294 MLflow Server \uc124\uce58 \ub2f9\uc2dc \uc124\uce58\ud55c minio\uc758 ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/services-networking/dns-pod-service/"},"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9"),"\ud569\ub2c8\ub2e4. 
\ud574\ub2f9 service \ub294 kubeflow namespace\uc5d0\uc11c minio-service\ub77c\ub294 \uc774\ub984\uc73c\ub85c \uc0dd\uc131\ub418\uc5c8\uc73c\ubbc0\ub85c, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.",(0,r.kt)("br",{parentName:"p"}),"\n","\uc774\uc640 \ube44\uc2b7\ud558\uac8c tracking_uri\uc758 \uc8fc\uc18c\ub294 mlflow server\uc758 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uc11c\ube44\uc2a4 DNS \ub124\uc784\uc744 \ud65c\uc6a9\ud558\uc5ec, ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000")," \ub85c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"\uc774\uc81c \uc791\uc131\ud55c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc5f0\uacb0\ud574\uc11c \ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \ub9cc\ub4e4\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"\ubaa8\ub378\uc744 \ud559\uc2b5\ud560 \ub54c \uc4f8 \ub370\uc774\ud130\ub294 sklearn\uc758 iris \uc785\ub2c8\ub2e4.\n\ub370\uc774\ud130\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778 \ucf54\ub4dc\ub294 
\ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"\uc704\uc5d0\uc11c \uc791\uc131\ub41c \ucef4\ud3ec\ub10c\ud2b8\uc640 \ud30c\uc774\ud504\ub77c\uc778\uc744 \ud558\ub098\uc758 \ud30c\uc774\uc36c \ud30c\uc77c\uc5d0 \uc815\ub9ac\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n 
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n 
outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, 
/tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n 
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n 
"image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n 
labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": 
"{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"\uc2e4\ud589\ud6c4 \uc0dd\uc131\ub41c mlflow_pipeline.yaml \ud30c\uc77c\uc744 \ud30c\uc774\ud504\ub77c\uc778 \uc5c5\ub85c\ub4dc\ud55c \ud6c4, \uc2e4\ud589\ud558\uc5ec run \uc758 \uacb0\uacfc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(1822).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"mlflow service\ub97c \ud3ec\ud2b8\ud3ec\uc6cc\ub529\ud574\uc11c MLflow ui\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"\uc6f9 \ube0c\ub77c\uc6b0\uc800\ub97c \uc5f4\uc5b4 localhost:5000\uc73c\ub85c \uc811\uc18d\ud558\uba74, \ub2e4\uc74c\uacfc \uac19\uc774 run\uc774 \uc0dd\uc131\ub41c \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(339).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"run \uc744 \ud074\ub9ad\ud574\uc11c \ud655\uc778\ud558\uba74 \ud559\uc2b5\ud55c \ubaa8\ub378 \ud30c\uc77c\uc774 \uc788\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(7463).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},3810:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},8705:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},9481:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},3268:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},1822:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},339:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},7463:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file diff --git a/assets/js/e7600b97.26de1f81.js b/assets/js/e7600b97.414024f5.js similarity index 99% rename from assets/js/e7600b97.26de1f81.js rename to assets/js/e7600b97.414024f5.js index e4eb8e0a..c5480ffb 100644 --- a/assets/js/e7600b97.26de1f81.js +++ b/assets/js/e7600b97.414024f5.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[799],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var m=l.createContext({}),s=function(e){var n=l.useContext(m),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=s(e.components);return l.createElement(m.Provider,{value:n},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return 
l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,m=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=s(t),u=a,f=c["".concat(m,".").concat(u)]||c[u]||d[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var m in n)hasOwnProperty.call(n,m)&&(i[m]=n[m]);i.originalType=e,i[c]="string"==typeof e?e:a,r[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>m,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/docs/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/docs/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/docs/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/docs/api-deployment/seldon-children"}},m={},s=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API \uc0dd\uc131",id:"api-\uc0dd\uc131",level:2}],p={toc:s},c="wrapper";function d(e){let{components:n,...o}=e;return(0,a.kt)(c,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,a.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-mlflow"},"MLflow Component"),"\uc5d0\uc11c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 API\ub97c \uc0dd\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"initContainer\uac00 minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc73c\ub824\uba74 credentials\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\nminio\uc5d0 \uc811\uadfc\ud558\uae30 \uc704\ud55c credentials\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," \uc758 \uc785\ub825\uac12\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),"\uc785\ub2c8\ub2e4. 
\ub2e4\ub9cc secret\uc758 \uc785\ub825\uac12\uc740 \uc778\ucf54\ub529\ub41c \uac12\uc774\uc5ec\uc57c \ub418\uae30 \ub54c\ubb38\uc5d0 \uc2e4\uc81c\ub85c \uc785\ub825\ub418\ub294 \uac12\uc740 \ub2e4\uc74c\uc744 \uc218\ud589\ud6c4 \ub098\uc624\ub294 \uac12\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"data\uc5d0 \uc785\ub825\ub418\uc5b4\uc57c \ud558\ub294 \uac12\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uac12\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc744 \uc804\uccb4 \uac12\uc5d0 \ub300\ud574\uc11c \uc9c4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: bWluaW8="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: bWluaW8xMjM="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA="),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: ZmFsc2U=")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\ub294 yaml\ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"\uc774\uc81c Seldon Core\ub97c \uc0dd\uc131\ud558\ub294 yaml\ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - 
"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"\uc774 \uc804\uc5d0 \uc791\uc131\ud55c ",(0,a.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-fields"},"Seldon Fields"),"\uc640 \ub2ec\ub77c\uc9c4 \uc810\uc740 \ud06c\uac8c \ub450 \ubd80\ubd84\uc785\ub2c8\ub2e4.\ninitContainer\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"envFrom")," \ud544\ub4dc\uac00 \ucd94\uac00\ub418\uc5c8\uc73c\uba70 args\uc758 \uc8fc\uc18c\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc")," \ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"\uc55e\uc11c args\uc758 \uccab\ubc88\uc9f8 array\ub294 \uc6b0\ub9ac\uac00 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uacbd\ub85c\ub77c\uace0 \ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7fc mlflow\uc5d0 \uc800\uc7a5\ub41c \ubaa8\ub378\uc758 \uacbd\ub85c\ub294 \uc5b4\ub5bb\uac8c \uc54c \uc218 \uc788\uc744\uae4c\uc694?"),(0,a.kt)("p",null,"\ub2e4\uc2dc mlflow\uc5d0 \ub4e4\uc5b4\uac00\uc11c run\uc744 \ud074\ub9ad\ud558\uace0 \ubaa8\ub378\uc744 \ub204\ub974\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(8663).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"\uc774\ub807\uac8c \ud655\uc778\ub41c \uacbd\ub85c\ub97c \uc785\ub825\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ub370 \ud544\uc694\ud55c \ud658\uacbd\ubcc0\uc218\ub97c \uc785\ub825\ud574\uc8fc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4.\n\uc55e\uc11c \ub9cc\ub4e0 ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret"),"\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"api-\uc0dd\uc131"},"API \uc0dd\uc131"),(0,a.kt)("p",null,"\uc6b0\uc120 \uc704\uc5d0\uc11c \uc815\uc758\ud55c \uc2a4\ud399\uc744 yaml \ud30c\uc77c\ub85c \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n 
readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"seldon pod\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"\uc774\uc81c pod\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub730 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c API\ub97c \uc0dd\uc131\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"CLI\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ub41c API\uc5d0\ub294 \ub2e4\uc74c request\ub97c \ud1b5\ud574 \uc2e4\ud589\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}d.isMDXComponent=!0},8663:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[799],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var m=l.createContext({}),s=function(e){var n=l.useContext(m),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=s(e.components);return l.createElement(m.Provider,{value:n},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return 
l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,m=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),c=s(t),u=a,f=c["".concat(m,".").concat(u)]||c[u]||d[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var m in n)hasOwnProperty.call(n,m)&&(i[m]=n[m]);i.originalType=e,i[c]="string"==typeof e?e:a,r[1]=i;for(var s=2;s{t.r(n),t.d(n,{assets:()=>m,contentTitle:()=>r,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>s});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/docs/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/docs/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/docs/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/docs/api-deployment/seldon-children"}},m={},s=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API \uc0dd\uc131",id:"api-\uc0dd\uc131",level:2}],p={toc:s},c="wrapper";function d(e){let{components:n,...o}=e;return(0,a.kt)(c,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 ",(0,a.kt)("a",{parentName:"p",href:"/docs/kubeflow/advanced-mlflow"},"MLflow Component"),"\uc5d0\uc11c \uc800\uc7a5\ub41c \ubaa8\ub378\uc744 \uc774\uc6a9\ud574 API\ub97c \uc0dd\uc131\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud574\uc11c \uc54c\uc544\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"initContainer\uac00 minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc73c\ub824\uba74 credentials\uac00 \ud544\uc694\ud569\ub2c8\ub2e4.\nminio\uc5d0 \uc811\uadfc\ud558\uae30 \uc704\ud55c credentials\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," \uc758 \uc785\ub825\uac12\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),"\uc785\ub2c8\ub2e4. 
\ub2e4\ub9cc secret\uc758 \uc785\ub825\uac12\uc740 \uc778\ucf54\ub529\ub41c \uac12\uc774\uc5ec\uc57c \ub418\uae30 \ub54c\ubb38\uc5d0 \uc2e4\uc81c\ub85c \uc785\ub825\ub418\ub294 \uac12\uc740 \ub2e4\uc74c\uc744 \uc218\ud589\ud6c4 \ub098\uc624\ub294 \uac12\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"data\uc5d0 \uc785\ub825\ub418\uc5b4\uc57c \ud558\ub294 \uac12\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc740 \ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574\uc11c \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"\uadf8\ub7ec\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uac12\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"\uc778\ucf54\ub529\uc744 \uc804\uccb4 \uac12\uc5d0 \ub300\ud574\uc11c \uc9c4\ud589\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: bWluaW8="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: bWluaW8xMjM="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA="),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: ZmFsc2U=")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\ub294 yaml\ud30c\uc77c\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 secret\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"\uc774\uc81c Seldon Core\ub97c \uc0dd\uc131\ud558\ub294 yaml\ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - 
"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"\uc774 \uc804\uc5d0 \uc791\uc131\ud55c ",(0,a.kt)("a",{parentName:"p",href:"/docs/api-deployment/seldon-fields"},"Seldon Fields"),"\uc640 \ub2ec\ub77c\uc9c4 \uc810\uc740 \ud06c\uac8c \ub450 \ubd80\ubd84\uc785\ub2c8\ub2e4.\ninitContainer\uc5d0 ",(0,a.kt)("inlineCode",{parentName:"p"},"envFrom")," \ud544\ub4dc\uac00 \ucd94\uac00\ub418\uc5c8\uc73c\uba70 args\uc758 \uc8fc\uc18c\uac00 ",(0,a.kt)("inlineCode",{parentName:"p"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc")," \ub85c \ubc14\ub00c\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"\uc55e\uc11c args\uc758 \uccab\ubc88\uc9f8 array\ub294 \uc6b0\ub9ac\uac00 \ub2e4\uc6b4\ub85c\ub4dc\ubc1b\uc744 \ubaa8\ub378\uc758 \uacbd\ub85c\ub77c\uace0 \ud588\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uadf8\ub7fc mlflow\uc5d0 \uc800\uc7a5\ub41c \ubaa8\ub378\uc758 \uacbd\ub85c\ub294 \uc5b4\ub5bb\uac8c \uc54c \uc218 \uc788\uc744\uae4c\uc694?"),(0,a.kt)("p",null,"\ub2e4\uc2dc mlflow\uc5d0 \ub4e4\uc5b4\uac00\uc11c run\uc744 \ud074\ub9ad\ud558\uace0 \ubaa8\ub378\uc744 \ub204\ub974\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(8663).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"\uc774\ub807\uac8c \ud655\uc778\ub41c \uacbd\ub85c\ub97c \uc785\ub825\ud558\uba74 \ub429\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"minio\uc5d0 \uc811\uadfc\ud574\uc11c \ubaa8\ub378\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \ub370 \ud544\uc694\ud55c \ud658\uacbd\ubcc0\uc218\ub97c \uc785\ub825\ud574\uc8fc\ub294 \uacfc\uc815\uc785\ub2c8\ub2e4.\n\uc55e\uc11c \ub9cc\ub4e0 ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret"),"\ub97c \uc774\uc6a9\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"api-\uc0dd\uc131"},"API \uc0dd\uc131"),(0,a.kt)("p",null,"\uc6b0\uc120 \uc704\uc5d0\uc11c \uc815\uc758\ud55c \uc2a4\ud399\uc744 yaml \ud30c\uc77c\ub85c \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n 
readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"seldon pod\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"\uc774\uc81c pod\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub730 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"\ub2e4\uc74c\uacfc \ube44\uc2b7\ud558\uac8c \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c API\ub97c \uc0dd\uc131\ud588\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"CLI\ub97c \uc774\uc6a9\ud574 \uc0dd\uc131\ub41c API\uc5d0\ub294 \ub2e4\uc74c request\ub97c \ud1b5\ud574 \uc2e4\ud589\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc2e4\ud589\ub420 \uacbd\uc6b0 \ub2e4\uc74c\uacfc \uac19\uc740 \uacb0\uacfc\ub97c \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}d.isMDXComponent=!0},8663:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file diff --git a/assets/js/e8d17a59.7d4f0c66.js b/assets/js/e8d17a59.dfd0edd5.js similarity index 97% rename from assets/js/e8d17a59.7d4f0c66.js rename to assets/js/e8d17a59.dfd0edd5.js index e5f1a9fe..b1b72eea 100644 --- a/assets/js/e8d17a59.7d4f0c66.js +++ b/assets/js/e8d17a59.dfd0edd5.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1526],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var 
l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(r),m=o,f=c["".concat(l,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"version-1.0/kubeflow-dashboard-guide/experiments",title:"5. Experiments(AutoML)",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes"},next:{title:"6. 
Kubeflow Pipeline \uad00\ub828",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"}},l={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(AutoML)\uc744 \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(2815).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"Experiments(AutoML) \ud398\uc774\uc9c0\ub294 Kubeflow\uc5d0\uc11c Hyperparameter Tuning\uacfc Neural Architecture Search\ub97c \ud1b5\ud55c AutoML\uc744 \ub2f4\ub2f9\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),"\ub97c \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"Katib\uc640 Experiments(AutoML)\uc5d0 \ub300\ud55c \uc0ac\uc6a9\ubc95\uc740 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," v1.0\uc5d0\uc11c\ub294 \ub2e4\ub8e8\uc9c0 \uc54a\uc73c\uba70, v2.0\uc5d0 \ucd94\uac00\ub420 \uc608\uc815\uc785\ub2c8\ub2e4."))}d.isMDXComponent=!0},2815:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1526],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>f});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),p=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=p(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),c=p(r),m=o,f=c["".concat(l,".").concat(m)]||c[m]||d[m]||a;return r?n.createElement(f,i(i({ref:t},u),{},{components:r})):n.createElement(f,i({ref:t},u))}));function f(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var p=2;p{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>p});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"version-1.0/kubeflow-dashboard-guide/experiments",title:"5. 
Experiments(AutoML)",description:"",source:"@site/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/docs/1.0/kubeflow-dashboard-guide/volumes"},next:{title:"6. Kubeflow Pipeline \uad00\ub828",permalink:"/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"}},l={},p=[],u={toc:p},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"\ub2e4\uc74c\uc73c\ub85c\ub294 Central Dashboard\uc758 \uc67c\ucabd \ud0ed\uc758 Experiments(AutoML)\uc744 \ud074\ub9ad\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(6316).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(2815).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"Experiments(AutoML) \ud398\uc774\uc9c0\ub294 Kubeflow\uc5d0\uc11c Hyperparameter Tuning\uacfc Neural Architecture Search\ub97c \ud1b5\ud55c AutoML\uc744 \ub2f4\ub2f9\ud558\ub294 ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),"\ub97c \uad00\ub9ac\ud560 \uc218 \uc788\ub294 \ud398\uc774\uc9c0\uc785\ub2c8\ub2e4."),(0,o.kt)("p",null,"Katib\uc640 Experiments(AutoML)\uc5d0 \ub300\ud55c \uc0ac\uc6a9\ubc95\uc740 ",(0,o.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," v1.0\uc5d0\uc11c\ub294 \ub2e4\ub8e8\uc9c0 \uc54a\uc73c\uba70, v2.0\uc5d0 \ucd94\uac00\ub420 \uc608\uc815\uc785\ub2c8\ub2e4."))}d.isMDXComponent=!0},2815:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},6316:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/assets/js/f1d05694.ab6b7e87.js b/assets/js/f1d05694.bf154a45.js similarity index 99% rename from assets/js/f1d05694.ab6b7e87.js rename to assets/js/f1d05694.bf154a45.js index 00cfa398..bc2d7ea3 100644 --- a/assets/js/f1d05694.ab6b7e87.js +++ b/assets/js/f1d05694.bf154a45.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8084],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=r.createContext({}),u=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=u(e.components);return 
r.createElement(p.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,l=e.originalType,p=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,k=d["".concat(p,".").concat(m)]||d[m]||c[m]||l;return n?r.createElement(k,a(a({ref:t},s),{},{components:n})):r.createElement(k,a({ref:t},s))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var l=n.length,a=new Array(l);a[0]=m;var o={};for(var p in t)hasOwnProperty.call(t,p)&&(o[p]=t[p]);o.originalType=e,o[d]="string"==typeof e?e:i,a[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>a,default:()=>c,frontMatter:()=>l,metadata:()=>o,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const l={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,o={unversionedId:"introduction/component",id:"introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/docs/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/docs/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/docs/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/docs/introduction/why_kubernetes"}},p={},u=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model training",id:"3-model-training",level:3},{value:"4. Model evaluation",id:"4-model-evaluation",level:3},{value:"5. Model serving",id:"5-model-serving",level:3},{value:"6. Online experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. 
ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],s={toc:u},d="wrapper";function c(e){let{components:t,...l}=e;return(0,i.kt)(d,(0,r.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,i.kt)("p",null," 2021\ub144 5\uc6d4\uc5d0 \ubc1c\ud45c\ub41c \uad6c\uae00\uc758 ",(0,i.kt)("a",{parentName:"p",href:"https://services.google.com/fh/files/misc/practitioners_guide_to_mlops_whitepaper.pdf"},"white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning"),"\uc5d0\uc11c\ub294 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\ub4e4\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc744 \uc5b8\uae09\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"mlops-component",src:n(878).Z,width:"2352",height:"1890"})),(0,i.kt)("p",null," \uac01 \uae30\ub2a5\uc774 \uc5b4\ub5a4 \uc5ed\ud560\uc744 \ud558\ub294\uc9c0 \uc0b4\ud3b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,i.kt)("p",null," \uc2e4\ud5d8(Experimentation)\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub370\uc774\ud130\ub97c \ubd84\uc11d\ud558\uace0, \ud504\ub85c\ud1a0\ud0c0\uc785 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uba70 \ud559\uc2b5 \uae30\ub2a5\uc744 \uad6c\ud604\ud560 \uc218 \uc788\ub3c4\ub85d \ud558\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uae43(Git)\uacfc \uac19\uc740 \ubc84\uc804 \ucee8\ud2b8\ub864 \ub3c4\uad6c\uc640 \ud1b5\ud569\ub41c \ub178\ud2b8\ubd81(Jupyter Notebook) \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\ud55c \ub370\uc774\ud130, \ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130, \ud3c9\uac00 \uc9c0\ud45c\ub97c \ud3ec\ud568\ud55c \uc2e4\ud5d8 \ucd94\uc801 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc640 \ubaa8\ub378\uc5d0 \ub300\ud55c \ubd84\uc11d \ubc0f \uc2dc\uac01\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"2-data-processing"},"2. 
Data Processing"),(0,i.kt)("p",null," \ub370\uc774\ud130 \ucc98\ub9ac(Data Processing)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378 \uac1c\ubc1c \ub2e8\uacc4, \uc9c0\uc18d\uc801\uc778 \ud559\uc2b5(Continuous Training) \ub2e8\uacc4, \uadf8\ub9ac\uace0 API \ubc30\ud3ec(API Deployment) \ub2e8\uacc4\uc5d0\uc11c \ub9ce\uc740 \uc591\uc758 \ub370\uc774\ud130\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ub370\uc774\ud130 \uc18c\uc2a4\uc640 \uc11c\ube44\uc2a4\uc5d0 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \ucee4\ub125\ud130(connector) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \uc778\ucf54\ub354(encoder) & \ub514\ucf54\ub354(decoder) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ub370\uc774\ud130 \ubcc0\ud658\uacfc \ud53c\ucc98 \uc5d4\uc9c0\ub2c8\uc5b4\ub9c1(feature engineering) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ud559\uc2b5\uacfc \uc11c\ube59\uc744 \uc704\ud55c \ud655\uc7a5 \uac00\ub2a5\ud55c \ubc30\uce58, \uc2a4\ud2b8\ub9bc \ub370\uc774\ud130 \ucc98\ub9ac \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"3-model-training"},"3. Model training"),(0,i.kt)("p",null," \ubaa8\ub378 \ud559\uc2b5(Model training)\uc740 \ubaa8\ub378 \ud559\uc2b5\uc744 \uc704\ud55c \uc54c\uace0\ub9ac\uc998\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc2e4\ud589\uc2dc\ucf1c\uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"ML \ud504\ub808\uc784\uc6cc\ud06c\uc758 \uc2e4\ud589\uc744 \uc704\ud55c \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc218\uc758 GPU / \ubd84\uc0b0 \ud559\uc2b5 \uc0ac\uc6a9\uc744 \uc704\ud55c \ubd84\uc0b0 \ud559\uc2b5 \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130 \ud29c\ub2dd\uacfc \ucd5c\uc801\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"4-model-evaluation"},"4. Model evaluation"),(0,i.kt)("p",null," \ubaa8\ub378 \ud3c9\uac00(Model evaluation)\ub294 \uc2e4\ud5d8 \ud658\uacbd\uacfc \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uad00\ucc30\ud560 \uc218 \uc788\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ud3c9\uac00 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ubaa8\ub378 \uc131\ub2a5 \ud3c9\uac00 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc9c0\uc18d \ud559\uc2b5 \uc2e4\ud589 \uacb0\uacfc\uc5d0 \ub300\ud55c \uc608\uce21 \uc131\ub2a5 \ucd94\uc801"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \ubaa8\ub378\uc758 \uc131\ub2a5 \ube44\uad50\uc640 \uc2dc\uac01\ud654"),(0,i.kt)("li",{parentName:"ul"},"\ud574\uc11d\ud560 \uc218 \uc788\ub294 AI \uae30\uc220\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378 \ucd9c\ub825 \ud574\uc11d \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"5-model-serving"},"5. 
Model serving"),(0,i.kt)("p",null," \ubaa8\ub378 \uc11c\ube59(Model serving)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uc11c\ube59\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uc800 \uc9c0\uc5f0 \ucd94\ub860\uacfc \uace0\uac00\uc6a9\uc131 \ucd94\ub860 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c ML \ubaa8\ub378 \uc11c\ube59 \ud504\ub808\uc784\uc6cc\ud06c \uc9c0\uc6d0(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGGoost. etc)"),(0,i.kt)("li",{parentName:"ul"},"\ubcf5\uc7a1\ud55c \ud615\ud0dc\uc758 \ucd94\ub860 \ub8e8\ud2f4 \uae30\ub2a5 \uc81c\uacf5, \uc608\ub97c \ub4e4\uc5b4 \uc804\ucc98\ub9ac(preprocess) \ub610\ub294 \ud6c4\ucc98\ub9ac(postprocess) \uae30\ub2a5\uacfc \ucd5c\uc885 \uacb0\uacfc\ub97c \uc704\ud574 \ub2e4\uc218\uc758 \ubaa8\ub378\uc774 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\ub97c \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("li",{parentName:"ul"},"\uc21c\uac04\uc801\uc73c\ub85c \uce58\uc19f\ub294 \ucd94\ub860 \uc694\uccad\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud55c \uc624\ud1a0 \uc2a4\ucf00\uc77c\ub9c1(autoscaling) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ucd94\ub860 \uc694\uccad\uacfc \ucd94\ub860 \uacb0\uacfc\uc5d0 \ub300\ud55c \ub85c\uae45 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"6-online-experimentation"},"6. Online experimentation"),(0,i.kt)("p",null," \uc628\ub77c\uc778 \uc2e4\ud5d8(Online experimentation)\uc740 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc774 \uc0dd\uc131\ub418\uc5c8\uc744 \ub54c, \uc774 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uba74 \uc5b4\ub290 \uc815\ub3c4\uc758 \uc131\ub2a5\uc744 \ubcf4\uc77c \uac83\uc778\uc9c0 \uac80\uc99d\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc774 \uae30\ub2a5\uc740 \uc0c8 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\ub294 \uac83\uae4c\uc9c0 \uc5f0\ub3d9\ud558\uae30 \uc704\ud574 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\uc640 \uc5f0\ub3d9\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uce74\ub098\ub9ac(canary) & \uc100\ub3c4(shadow) \ubc30\ud3ec \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"A/B \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\uba40\ud2f0 \uc554\ub4dc \ubc34\ub527(Multi-armed bandit) \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,i.kt)("p",null,"\ubaa8\ub378 \ubaa8\ub2c8\ud130\ub9c1(Model Monitoring)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \ubaa8\ub2c8\ud130\ub9c1\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4 \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \ub5a8\uc5b4\uc838 \uc5c5\ub370\uc774\ud2b8\uac00 \ud544\uc694\ud55c\uc9c0\uc5d0 \ub300\ud55c \uc815\ubcf4 \ub4f1\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"8-ml-pipeline"},"8. 
ML Pipeline"),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ud30c\uc774\ud504\ub77c\uc778(ML Pipeline)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ubcf5\uc7a1\ud55c ML \ud559\uc2b5\uacfc \ucd94\ub860 \uc791\uc5c5\uc744 \uad6c\uc131\ud558\uace0 \uc81c\uc5b4\ud558\uace0 \uc790\ub3d9\ud654\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \uc774\ubca4\ud2b8\ub97c \uc18c\uc2a4\ub97c \ud1b5\ud55c \ud30c\uc774\ud504\ub77c\uc778 \uc2e4\ud589 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130\uc640 \uc0dd\uc131\ub418\ub294 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub97c \uc704\ud55c \uba38\uc2e0\ub7ec\ub2dd \uba54\ud0c0\ub370\uc774\ud130 \ucd94\uc801\uacfc \uc5f0\ub3d9 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc778 \uba38\uc2e0\ub7ec\ub2dd \uc791\uc5c5\uc744 \uc704\ud55c \ub0b4\uc7a5 \ucef4\ud3ec\ub10c\ud2b8 \uc9c0\uc6d0\uacfc \uc0ac\uc6a9\uc790\uac00 \uc9c1\uc811 \uad6c\ud604\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \uc9c0\uc6d0 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc2e4\ud589 \ud658\uacbd \uc81c\uacf5 \uae30\ub2a5")),(0,i.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,i.kt)("p",null," \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \uc0dd\uba85 \uc8fc\uae30(Lifecycle)\uc744 \uc911\uc559 \uc800\uc7a5\uc18c\uc5d0\uc11c \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378 \uadf8\ub9ac\uace0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ub300\ud55c \ub4f1\ub85d, \ucd94\uc801, \ubc84\uc800\ub2dd \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ubc30\ud3ec\ub97c \uc704\ud574 \ud544\uc694\ud55c \ub370\uc774\ud130\uc640 \ub7f0\ud0c0\uc784 \ud328\ud0a4\uc9c0\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4 \uc800\uc7a5 \uae30\ub2a5")),(0,i.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc5d0 \ub300\ud55c \uacf5\uc720, \uac80\uc0c9, \uc7ac\uc0ac\uc6a9 \uadf8\ub9ac\uace0 \ubc84\uc804 \uad00\ub9ac \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc774\ubca4\ud2b8 \uc2a4\ud2b8\ub9ac\ubc0d \ubc0f \uc628\ub77c\uc778 \ucd94\ub860 \uc791\uc5c5\uc5d0 \ub300\ud55c \uc2e4\uc2dc\uac04 \ucc98\ub9ac \ubc0f \uc800 \uc9c0\uc5f0 \uc11c\ube59 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc0ac\uc9c4, \ud14d\uc2a4\ud2b8, \ud14c\uc774\ube14 \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \uac19\uc740 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130 \uc9c0\uc6d0 \uae30\ub2a5")),(0,i.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,i.kt)("p",null," MLOps\uc758 \uac01 \ub2e8\uacc4\uc5d0\uc11c\ub294 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \uc0b0\ucd9c\ubb3c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. 
ML \uba54\ud0c0\ub370\uc774\ud130\ub294 \uc774\ub7f0 \uc0b0\ucd9c\ubb3c\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\nML \uba54\ud0c0\ub370\uc774\ud130\uc640 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub294 \uc0b0\ucd9c\ubb3c\uc758 \uc704\uce58, \ud0c0\uc785, \uc18d\uc131, \uadf8\ub9ac\uace0 \uad00\ub828\ub41c \uc2e4\ud5d8(experiment)\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uad00\ub9ac\ud558\uae30 \uc704\ud574 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \ud788\uc2a4\ud1a0\ub9ac \uad00\ub9ac \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc2e4\ud5d8\uacfc \ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130 \uc124\uc815\uc5d0 \ub300\ud55c \ucd94\uc801, \uacf5\uc720 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \uc800\uc7a5, \uc811\uadfc, \uc2dc\uac01\ud654, \ub2e4\uc6b4\ub85c\ub4dc \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\ub978 MLOps \uae30\ub2a5\uacfc\uc758 \ud1b5\ud569 \uae30\ub2a5 \uc81c\uacf5")))}c.isMDXComponent=!0},878:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8084],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=r.createContext({}),u=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=u(e.components);return r.createElement(p.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,l=e.originalType,p=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,k=d["".concat(p,".").concat(m)]||d[m]||c[m]||l;return n?r.createElement(k,a(a({ref:t},s),{},{components:n})):r.createElement(k,a({ref:t},s))}));function k(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var l=n.length,a=new Array(l);a[0]=m;var o={};for(var p in t)hasOwnProperty.call(t,p)&&(o[p]=t[p]);o.originalType=e,o[d]="string"==typeof e?e:i,a[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>a,default:()=>c,frontMatter:()=>l,metadata:()=>o,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const l={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,o={unversionedId:"introduction/component",id:"introduction/component",title:"3. 
Components of MLOps",description:"Describe MLOps Components",source:"@site/docs/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/docs/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/docs/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/docs/introduction/why_kubernetes"}},p={},u=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model training",id:"3-model-training",level:3},{value:"4. Model evaluation",id:"4-model-evaluation",level:3},{value:"5. Model serving",id:"5-model-serving",level:3},{value:"6. Online experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],s={toc:u},d="wrapper";function c(e){let{components:t,...l}=e;return(0,i.kt)(d,(0,r.Z)({},s,l,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,i.kt)("p",null," 2021\ub144 5\uc6d4\uc5d0 \ubc1c\ud45c\ub41c \uad6c\uae00\uc758 ",(0,i.kt)("a",{parentName:"p",href:"https://services.google.com/fh/files/misc/practitioners_guide_to_mlops_whitepaper.pdf"},"white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning"),"\uc5d0\uc11c\ub294 MLOps\uc758 \ud575\uc2ec \uae30\ub2a5\ub4e4\ub85c \ub2e4\uc74c\uacfc \uac19\uc740 \uac83\ub4e4\uc744 \uc5b8\uae09\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"mlops-component",src:n(878).Z,width:"2352",height:"1890"})),(0,i.kt)("p",null," \uac01 \uae30\ub2a5\uc774 \uc5b4\ub5a4 \uc5ed\ud560\uc744 \ud558\ub294\uc9c0 \uc0b4\ud3b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"1-experimentation"},"1. 
Experimentation"),(0,i.kt)("p",null," \uc2e4\ud5d8(Experimentation)\uc740 \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub4e4\uc774 \ub370\uc774\ud130\ub97c \ubd84\uc11d\ud558\uace0, \ud504\ub85c\ud1a0\ud0c0\uc785 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uba70 \ud559\uc2b5 \uae30\ub2a5\uc744 \uad6c\ud604\ud560 \uc218 \uc788\ub3c4\ub85d \ud558\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uae43(Git)\uacfc \uac19\uc740 \ubc84\uc804 \ucee8\ud2b8\ub864 \ub3c4\uad6c\uc640 \ud1b5\ud569\ub41c \ub178\ud2b8\ubd81(Jupyter Notebook) \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\uc0ac\uc6a9\ud55c \ub370\uc774\ud130, \ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130, \ud3c9\uac00 \uc9c0\ud45c\ub97c \ud3ec\ud568\ud55c \uc2e4\ud5d8 \ucd94\uc801 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc640 \ubaa8\ub378\uc5d0 \ub300\ud55c \ubd84\uc11d \ubc0f \uc2dc\uac01\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,i.kt)("p",null," \ub370\uc774\ud130 \ucc98\ub9ac(Data Processing)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378 \uac1c\ubc1c \ub2e8\uacc4, \uc9c0\uc18d\uc801\uc778 \ud559\uc2b5(Continuous Training) \ub2e8\uacc4, \uadf8\ub9ac\uace0 API \ubc30\ud3ec(API Deployment) \ub2e8\uacc4\uc5d0\uc11c \ub9ce\uc740 \uc591\uc758 \ub370\uc774\ud130\ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ub370\uc774\ud130 \uc18c\uc2a4\uc640 \uc11c\ube44\uc2a4\uc5d0 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \ucee4\ub125\ud130(connector) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \ud638\ud658\ub418\ub294 \ub370\uc774\ud130 \uc778\ucf54\ub354(encoder) & \ub514\ucf54\ub354(decoder) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ub370\uc774\ud130 \ubcc0\ud658\uacfc \ud53c\ucc98 \uc5d4\uc9c0\ub2c8\uc5b4\ub9c1(feature engineering) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ud559\uc2b5\uacfc \uc11c\ube59\uc744 \uc704\ud55c \ud655\uc7a5 \uac00\ub2a5\ud55c \ubc30\uce58, \uc2a4\ud2b8\ub9bc \ub370\uc774\ud130 \ucc98\ub9ac \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"3-model-training"},"3. Model training"),(0,i.kt)("p",null," \ubaa8\ub378 \ud559\uc2b5(Model training)\uc740 \ubaa8\ub378 \ud559\uc2b5\uc744 \uc704\ud55c \uc54c\uace0\ub9ac\uc998\uc744 \ud6a8\uc728\uc801\uc73c\ub85c \uc2e4\ud589\uc2dc\ucf1c\uc8fc\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"ML \ud504\ub808\uc784\uc6cc\ud06c\uc758 \uc2e4\ud589\uc744 \uc704\ud55c \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc218\uc758 GPU / \ubd84\uc0b0 \ud559\uc2b5 \uc0ac\uc6a9\uc744 \uc704\ud55c \ubd84\uc0b0 \ud559\uc2b5 \ud658\uacbd \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ud558\uc774\ud37c \ud30c\ub77c\ubbf8\ud130 \ud29c\ub2dd\uacfc \ucd5c\uc801\ud654 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"4-model-evaluation"},"4. 
Model evaluation"),(0,i.kt)("p",null," \ubaa8\ub378 \ud3c9\uac00(Model evaluation)\ub294 \uc2e4\ud5d8 \ud658\uacbd\uacfc \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ub3d9\uc791\ud558\ub294 \ubaa8\ub378\uc758 \uc131\ub2a5\uc744 \uad00\ucc30\ud560 \uc218 \uc788\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ud3c9\uac00 \ub370\uc774\ud130\uc5d0 \ub300\ud55c \ubaa8\ub378 \uc131\ub2a5 \ud3c9\uac00 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc9c0\uc18d \ud559\uc2b5 \uc2e4\ud589 \uacb0\uacfc\uc5d0 \ub300\ud55c \uc608\uce21 \uc131\ub2a5 \ucd94\uc801"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \ubaa8\ub378\uc758 \uc131\ub2a5 \ube44\uad50\uc640 \uc2dc\uac01\ud654"),(0,i.kt)("li",{parentName:"ul"},"\ud574\uc11d\ud560 \uc218 \uc788\ub294 AI \uae30\uc220\uc744 \uc774\uc6a9\ud55c \ubaa8\ub378 \ucd9c\ub825 \ud574\uc11d \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"5-model-serving"},"5. Model serving"),(0,i.kt)("p",null," \ubaa8\ub378 \uc11c\ube59(Model serving)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uace0 \uc11c\ube59\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uc800 \uc9c0\uc5f0 \ucd94\ub860\uacfc \uace0\uac00\uc6a9\uc131 \ucd94\ub860 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c ML \ubaa8\ub378 \uc11c\ube59 \ud504\ub808\uc784\uc6cc\ud06c \uc9c0\uc6d0(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGGoost. etc)"),(0,i.kt)("li",{parentName:"ul"},"\ubcf5\uc7a1\ud55c \ud615\ud0dc\uc758 \ucd94\ub860 \ub8e8\ud2f4 \uae30\ub2a5 \uc81c\uacf5, \uc608\ub97c \ub4e4\uc5b4 \uc804\ucc98\ub9ac(preprocess) \ub610\ub294 \ud6c4\ucc98\ub9ac(postprocess) \uae30\ub2a5\uacfc \ucd5c\uc885 \uacb0\uacfc\ub97c \uc704\ud574 \ub2e4\uc218\uc758 \ubaa8\ub378\uc774 \uc0ac\uc6a9\ub418\ub294 \uacbd\uc6b0\ub97c \ub9d0\ud569\ub2c8\ub2e4."),(0,i.kt)("li",{parentName:"ul"},"\uc21c\uac04\uc801\uc73c\ub85c \uce58\uc19f\ub294 \ucd94\ub860 \uc694\uccad\uc744 \ucc98\ub9ac\ud558\uae30 \uc704\ud55c \uc624\ud1a0 \uc2a4\ucf00\uc77c\ub9c1(autoscaling) \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ucd94\ub860 \uc694\uccad\uacfc \ucd94\ub860 \uacb0\uacfc\uc5d0 \ub300\ud55c \ub85c\uae45 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"6-online-experimentation"},"6. Online experimentation"),(0,i.kt)("p",null," \uc628\ub77c\uc778 \uc2e4\ud5d8(Online experimentation)\uc740 \uc0c8\ub85c\uc6b4 \ubaa8\ub378\uc774 \uc0dd\uc131\ub418\uc5c8\uc744 \ub54c, \uc774 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\uba74 \uc5b4\ub290 \uc815\ub3c4\uc758 \uc131\ub2a5\uc744 \ubcf4\uc77c \uac83\uc778\uc9c0 \uac80\uc99d\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc774 \uae30\ub2a5\uc740 \uc0c8 \ubaa8\ub378\uc744 \ubc30\ud3ec\ud558\ub294 \uac83\uae4c\uc9c0 \uc5f0\ub3d9\ud558\uae30 \uc704\ud574 \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\uc640 \uc5f0\ub3d9\ub418\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\uce74\ub098\ub9ac(canary) & \uc100\ub3c4(shadow) \ubc30\ud3ec \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"A/B \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\uba40\ud2f0 \uc554\ub4dc \ubc34\ub527(Multi-armed bandit) \ud14c\uc2a4\ud2b8 \uae30\ub2a5 \uc81c\uacf5")),(0,i.kt)("h3",{id:"7-model-monitoring"},"7. 
Model Monitoring"),(0,i.kt)("p",null,"\ubaa8\ub378 \ubaa8\ub2c8\ud130\ub9c1(Model Monitoring)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub3d9\uc791\ud558\uace0 \uc788\ub294\uc9c0\ub97c \ubaa8\ub2c8\ud130\ub9c1\ud558\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4. \uc608\ub97c \ub4e4\uc5b4 \ubaa8\ub378\uc758 \uc131\ub2a5\uc774 \ub5a8\uc5b4\uc838 \uc5c5\ub370\uc774\ud2b8\uac00 \ud544\uc694\ud55c\uc9c0\uc5d0 \ub300\ud55c \uc815\ubcf4 \ub4f1\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,i.kt)("p",null,"\uba38\uc2e0\ub7ec\ub2dd \ud30c\uc774\ud504\ub77c\uc778(ML Pipeline)\uc740 \uc0c1\uc6a9 \ud658\uacbd\uc5d0\uc11c \ubcf5\uc7a1\ud55c ML \ud559\uc2b5\uacfc \ucd94\ub860 \uc791\uc5c5\uc744 \uad6c\uc131\ud558\uace0 \uc81c\uc5b4\ud558\uace0 \uc790\ub3d9\ud654\ud558\uae30 \uc704\ud55c \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub2e4\uc591\ud55c \uc774\ubca4\ud2b8\ub97c \uc18c\uc2a4\ub97c \ud1b5\ud55c \ud30c\uc774\ud504\ub77c\uc778 \uc2e4\ud589 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130\uc640 \uc0dd\uc131\ub418\ub294 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub97c \uc704\ud55c \uba38\uc2e0\ub7ec\ub2dd \uba54\ud0c0\ub370\uc774\ud130 \ucd94\uc801\uacfc \uc5f0\ub3d9 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc77c\ubc18\uc801\uc778 \uba38\uc2e0\ub7ec\ub2dd \uc791\uc5c5\uc744 \uc704\ud55c \ub0b4\uc7a5 \ucef4\ud3ec\ub10c\ud2b8 \uc9c0\uc6d0\uacfc \uc0ac\uc6a9\uc790\uac00 \uc9c1\uc811 \uad6c\ud604\ud55c \ucef4\ud3ec\ub10c\ud2b8\uc5d0 \ub300\ud55c \uc9c0\uc6d0 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc11c\ub85c \ub2e4\ub978 \uc2e4\ud589 \ud658\uacbd \uc81c\uacf5 \uae30\ub2a5")),(0,i.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,i.kt)("p",null," \ubaa8\ub378 \uc800\uc7a5\uc18c(Model Registry)\ub294 \uba38\uc2e0\ub7ec\ub2dd \ubaa8\ub378\uc758 \uc0dd\uba85 \uc8fc\uae30(Lifecycle)\uc744 \uc911\uc559 \uc800\uc7a5\uc18c\uc5d0\uc11c \uad00\ub9ac\ud560 \uc218 \uc788\uac8c \ud574 \uc8fc\ub294 \uae30\ub2a5\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ud559\uc2b5\ub41c \ubaa8\ub378 \uadf8\ub9ac\uace0 \ubc30\ud3ec\ub41c \ubaa8\ub378\uc5d0 \ub300\ud55c \ub4f1\ub85d, \ucd94\uc801, \ubc84\uc800\ub2dd \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ubc30\ud3ec\ub97c \uc704\ud574 \ud544\uc694\ud55c \ub370\uc774\ud130\uc640 \ub7f0\ud0c0\uc784 \ud328\ud0a4\uc9c0\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4 \uc800\uc7a5 \uae30\ub2a5")),(0,i.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"\ub370\uc774\ud130\uc5d0 \ub300\ud55c \uacf5\uc720, \uac80\uc0c9, \uc7ac\uc0ac\uc6a9 \uadf8\ub9ac\uace0 \ubc84\uc804 \uad00\ub9ac \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc774\ubca4\ud2b8 \uc2a4\ud2b8\ub9ac\ubc0d \ubc0f \uc628\ub77c\uc778 \ucd94\ub860 \uc791\uc5c5\uc5d0 \ub300\ud55c \uc2e4\uc2dc\uac04 \ucc98\ub9ac \ubc0f \uc800 \uc9c0\uc5f0 \uc11c\ube59 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc0ac\uc9c4, \ud14d\uc2a4\ud2b8, \ud14c\uc774\ube14 \ud615\ud0dc\uc758 \ub370\uc774\ud130\uc640 \uac19\uc740 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ub370\uc774\ud130 \uc9c0\uc6d0 \uae30\ub2a5")),(0,i.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. 
ML Metadata and Artifact Tracking"),(0,i.kt)("p",null," MLOps\uc758 \uac01 \ub2e8\uacc4\uc5d0\uc11c\ub294 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \uc0b0\ucd9c\ubb3c\ub4e4\uc774 \uc0dd\uc131\ub429\ub2c8\ub2e4. ML \uba54\ud0c0\ub370\uc774\ud130\ub294 \uc774\ub7f0 \uc0b0\ucd9c\ubb3c\ub4e4\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.\nML \uba54\ud0c0\ub370\uc774\ud130\uc640 \uc0b0\ucd9c\ubb3c \uad00\ub9ac\ub294 \uc0b0\ucd9c\ubb3c\uc758 \uc704\uce58, \ud0c0\uc785, \uc18d\uc131, \uadf8\ub9ac\uace0 \uad00\ub828\ub41c \uc2e4\ud5d8(experiment)\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uad00\ub9ac\ud558\uae30 \uc704\ud574 \ub2e4\uc74c\uacfc \uac19\uc740 \uae30\ub2a5\ub4e4\uc744 \uc81c\uacf5\ud569\ub2c8\ub2e4."),(0,i.kt)("ul",null,(0,i.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \ud788\uc2a4\ud1a0\ub9ac \uad00\ub9ac \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"\uc2e4\ud5d8\uacfc \ud30c\uc774\ud504\ub77c\uc778 \ud30c\ub77c\ubbf8\ud130 \uc124\uc815\uc5d0 \ub300\ud55c \ucd94\uc801, \uacf5\uc720 \uae30\ub2a5"),(0,i.kt)("li",{parentName:"ul"},"ML \uc0b0\ucd9c\ubb3c\uc5d0 \ub300\ud55c \uc800\uc7a5, \uc811\uadfc, \uc2dc\uac01\ud654, \ub2e4\uc6b4\ub85c\ub4dc \uae30\ub2a5 \uc81c\uacf5"),(0,i.kt)("li",{parentName:"ul"},"\ub2e4\ub978 MLOps \uae30\ub2a5\uacfc\uc758 \ud1b5\ud569 \uae30\ub2a5 \uc81c\uacf5")))}c.isMDXComponent=!0},878:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file diff --git a/assets/js/f28dfc8e.0caaaad0.js b/assets/js/f28dfc8e.3b4d20c5.js similarity index 98% rename from assets/js/f28dfc8e.0caaaad0.js rename to assets/js/f28dfc8e.3b4d20c5.js index 4f4e260d..3a7c2944 100644 --- a/assets/js/f28dfc8e.0caaaad0.js +++ b/assets/js/f28dfc8e.3b4d20c5.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3641],{3905:(t,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function p(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):p(p({},e),t)),a},d=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},s="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},k=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,d=o(t,["components","mdxType","originalType","parentName"]),s=m(a),k=r,c=s["".concat(i,".").concat(k)]||s[k]||u[k]||l;return a?n.createElement(c,p(p({ref:e},d),{},{components:a})):n.createElement(c,p({ref:e},d))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,p=new Array(l);p[0]=k;var o={};for(var i in e)hasOwnProperty.call(e,i)&&(o[i]=e[i]);o.originalType=t,o[s]="string"==typeof t?t:r,p[1]=o;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>p,default:()=>u,frontMatter:()=>l,metadata:()=>o,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"\ub2e4\ub8e8\uc9c0 
\ubabb\ud55c \uac83\ub4e4",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},p=void 0,o={unversionedId:"further-readings/info",id:"further-readings/info",title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",description:"MLOps Component",source:"@site/docs/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/docs/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/further-readings/info.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",frontMatter:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],d={toc:m},s="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(s,(0,n.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps Concepts"),"\uc5d0\uc11c \ub2e4\ub8e8\uc5c8\ub358 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub3c4\uc2dd\ud654\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(6244).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"\uc774 \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8ec \uae30\uc220 \uc2a4\ud0dd\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(3249).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"\ubcf4\uc2dc\ub294 \uac83\ucc98\ub7fc \uc544\uc9c1 \uc6b0\ub9ac\uac00 \ub2e4\ub8e8\uc9c0 \ubabb\ud55c \ub9ce\uc740 MLOps \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc2dc\uac04 \uad00\uacc4\uc0c1 \uc774\ubc88\uc5d0 \ubaa8\ub450 \ub2e4\ub8e8\uc9c0\ub294 \ubabb\ud588\uc9c0\ub9cc, \ub9cc\uc57d \ud544\uc694\ud558\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\ubcf4\uba74 \uc88b\uc744 \uac83 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(9505).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"\uc138\ubd80 \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. & Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process 
Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},6244:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},3249:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},9505:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3641],{3905:(t,e,a)=>{a.d(e,{Zo:()=>d,kt:()=>c});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function p(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):p(p({},e),t)),a},d=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},s="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},k=n.forwardRef((function(t,e){var 
a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,d=o(t,["components","mdxType","originalType","parentName"]),s=m(a),k=r,c=s["".concat(i,".").concat(k)]||s[k]||u[k]||l;return a?n.createElement(c,p(p({ref:e},d),{},{components:a})):n.createElement(c,p({ref:e},d))}));function c(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,p=new Array(l);p[0]=k;var o={};for(var i in e)hasOwnProperty.call(e,i)&&(o[i]=e[i]);o.originalType=t,o[s]="string"==typeof t?t:r,p[1]=o;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>p,default:()=>u,frontMatter:()=>l,metadata:()=>o,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},p=void 0,o={unversionedId:"further-readings/info",id:"further-readings/info",title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",description:"MLOps Component",source:"@site/docs/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/docs/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/further-readings/info.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",frontMatter:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],d={toc:m},s="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(s,(0,n.Z)({},d,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps Concepts"),"\uc5d0\uc11c \ub2e4\ub8e8\uc5c8\ub358 \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub3c4\uc2dd\ud654\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(6244).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"\uc774 \uc911 ",(0,r.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc5d0\uc11c \ub2e4\ub8ec \uae30\uc220 \uc2a4\ud0dd\ub4e4\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(3249).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"\ubcf4\uc2dc\ub294 \uac83\ucc98\ub7fc \uc544\uc9c1 \uc6b0\ub9ac\uac00 \ub2e4\ub8e8\uc9c0 \ubabb\ud55c \ub9ce\uc740 MLOps \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc774 \uc788\uc2b5\ub2c8\ub2e4. 
"),(0,r.kt)("p",null,"\uc2dc\uac04 \uad00\uacc4\uc0c1 \uc774\ubc88\uc5d0 \ubaa8\ub450 \ub2e4\ub8e8\uc9c0\ub294 \ubabb\ud588\uc9c0\ub9cc, \ub9cc\uc57d \ud544\uc694\ud558\ub2e4\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uc624\ud508\uc18c\uc2a4\ub4e4\uc744 \uba3c\uc800 \ucc38\uace0\ud574\ubcf4\uba74 \uc88b\uc744 \uac83 \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(9505).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"\uc138\ubd80 \ub0b4\uc6a9\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. & Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process 
Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},6244:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},3249:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},9505:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file diff --git a/assets/js/f84c40fb.27e7ffdf.js b/assets/js/f84c40fb.ab148738.js similarity index 99% rename from assets/js/f84c40fb.27e7ffdf.js rename to assets/js/f84c40fb.ab148738.js index 3c0cb3b8..6d9ea939 100644 --- a/assets/js/f84c40fb.27e7ffdf.js +++ b/assets/js/f84c40fb.ab148738.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9700],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>c});var a=t(7294);function p(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(p[t]=e[t]);return p}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(p[t]=e[t])}return p}var o=a.createContext({}),u=function(e){var n=a.useContext(o),t=n;return e&&(t="function"==typeof 
e?e(n):i(i({},n),e)),t},m=function(e){var n=u(e.components);return a.createElement(o.Provider,{value:n},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,p=e.mdxType,r=e.originalType,o=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=u(t),_=p,c=s["".concat(o,".").concat(_)]||s[_]||d[_]||r;return t?a.createElement(c,i(i({ref:n},m),{},{components:t})):a.createElement(c,i({ref:n},m))}));function c(e,n){var t=arguments,p=n&&n.mdxType;if("string"==typeof e||p){var r=t.length,i=new Array(r);i[0]=_;var l={};for(var o in n)hasOwnProperty.call(n,o)&&(l[o]=n[o]);l.originalType=e,l[s]="string"==typeof e?e:p,i[1]=l;for(var u=2;u{t.r(n),t.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>u});var a=t(7462),p=(t(7294),t(3905));const r={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow/advanced-run",id:"version-1.0/kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/docs/1.0/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/docs/1.0/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/docs/1.0/kubeflow/advanced-mlflow"}},o={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],m={toc:u},s="wrapper";function d(e){let{components:n,...r}=e;return(0,p.kt)(s,(0,a.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"run-result"},"Run Result"),(0,p.kt)("p",null,"Run \uc2e4\ud589 \uacb0\uacfc\ub97c \ub20c\ub7ec\ubcf4\uba74 3\uac1c\uc758 \ud0ed\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4.\n\uac01\uac01 Graph, Run output, Config \uc785\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-0.png",src:t(6394).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"graph"},"Graph"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-1.png",src:t(6255).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub798\ud504\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub204\ub974\uba74 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,p.kt)("p",null,"Input/Output \ud0ed\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud55c Config\ub4e4\uacfc Input, Output Artifacts\ub97c \ud655\uc778\ud558\uace0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"logs"},"Logs"),(0,p.kt)("p",null,"Logs\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc \uc2e4\ud589 \uc911 \ub098\uc624\ub294 \ubaa8\ub4e0 stdout\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\ub2e4\ub9cc pod\uc740 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub09c \ud6c4 \uc9c0\uc6cc\uc9c0\uae30 \ub54c\ubb38\uc5d0 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub098\uba74 \uc774 \ud0ed\uc5d0\uc11c\ub294 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.\n\uc774\ub54c\ub294 Output artifacts\uc758 main-logs\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"visualizations"},"Visualizations"),(0,p.kt)("p",null,"Visualizations\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0dd\uc131\ub41c \ud50c\ub78f\uc744 \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud50c\ub78f\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")')," argument\ub85c \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uac12\uc744 \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4. 
\uc774 \ub54c \ud50c\ub78f\uc758 \ud615\ud0dc\ub294 html \ud3ec\ub9f7\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,p.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"plot_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return 
file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Visualization\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-5.png",src:t(2244).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"run-output"},"Run 
output"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-2.png",src:t(6266).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Run output\uc740 kubeflow\uc5d0\uc11c \uc9c0\uc815\ud55c \ud615\ud0dc\ub85c \uc0dd\uae34 Artifacts\ub97c \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uacf3\uc774\uba70 \ud3c9\uac00 \uc9c0\ud45c(Metric)\ub97c \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c(Metric)\uc744 \ubcf4\uc5ec\uc8fc\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument\uc5d0 \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uc774\ub984\uacfc \uac12\uc744 json \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-pipeline"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc5d0\uc11c \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc5d0 \ucd94\uac00 \ud6c4 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n\uc804\uccb4 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Run Output\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-4.png",src:t(1882).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"config"},"Config"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-3.png",src:t(1487).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Config\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc785\ub825\ubc1b\uc740 \ubaa8\ub4e0 \uac12\uc744 \ud655\uc778\ud560 \uc218 
\uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},6394:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},6255:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},6266:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},1487:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},1882:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},2244:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9700],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>c});var a=t(7294);function p(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(p[t]=e[t]);return p}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(p[t]=e[t])}return p}var o=a.createContext({}),u=function(e){var n=a.useContext(o),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},m=function(e){var n=u(e.components);return a.createElement(o.Provider,{value:n},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,p=e.mdxType,r=e.originalType,o=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=u(t),_=p,c=s["".concat(o,".").concat(_)]||s[_]||d[_]||r;return t?a.createElement(c,i(i({ref:n},m),{},{components:t})):a.createElement(c,i({ref:n},m))}));function c(e,n){var t=arguments,p=n&&n.mdxType;if("string"==typeof e||p){var r=t.length,i=new Array(r);i[0]=_;var l={};for(var o in n)hasOwnProperty.call(n,o)&&(l[o]=n[o]);l.originalType=e,l[s]="string"==typeof e?e:p,i[1]=l;for(var u=2;u{t.r(n),t.d(n,{assets:()=>o,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>u});var a=t(7462),p=(t(7294),t(3905));const r={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow/advanced-run",id:"version-1.0/kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/versioned_docs/version-1.0/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/docs/1.0/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/docs/1.0/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/docs/1.0/kubeflow/advanced-mlflow"}},o={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],m={toc:u},s="wrapper";function d(e){let{components:n,...r}=e;return(0,p.kt)(s,(0,a.Z)({},m,r,{components:n,mdxType:"MDXLayout"}),(0,p.kt)("h2",{id:"run-result"},"Run Result"),(0,p.kt)("p",null,"Run \uc2e4\ud589 \uacb0\uacfc\ub97c \ub20c\ub7ec\ubcf4\uba74 3\uac1c\uc758 \ud0ed\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4.\n\uac01\uac01 Graph, Run output, Config \uc785\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-0.png",src:t(6394).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"graph"},"Graph"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-1.png",src:t(6255).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"\uadf8\ub798\ud504\uc5d0\uc11c\ub294 \uc2e4\ud589\ub41c \ucef4\ud3ec\ub10c\ud2b8\ub97c \ub204\ub974\uba74 \ucef4\ud3ec\ub10c\ud2b8\uc758 \uc2e4\ud589 \uc815\ubcf4\ub97c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,p.kt)("p",null,"Input/Output \ud0ed\uc740 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0ac\uc6a9\ud55c Config\ub4e4\uacfc Input, Output Artifacts\ub97c \ud655\uc778\ud558\uace0 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"logs"},"Logs"),(0,p.kt)("p",null,"Logs\uc5d0\uc11c\ub294 \ud30c\uc774\uc36c \ucf54\ub4dc \uc2e4\ud589 \uc911 \ub098\uc624\ub294 \ubaa8\ub4e0 stdout\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\ub2e4\ub9cc pod\uc740 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub09c \ud6c4 \uc9c0\uc6cc\uc9c0\uae30 \ub54c\ubb38\uc5d0 \uc77c\uc815 \uc2dc\uac04\uc774 \uc9c0\ub098\uba74 \uc774 \ud0ed\uc5d0\uc11c\ub294 \ud655\uc778\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4.\n\uc774\ub54c\ub294 Output artifacts\uc758 main-logs\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("h3",{id:"visualizations"},"Visualizations"),(0,p.kt)("p",null,"Visualizations\uc5d0\uc11c\ub294 \ucef4\ud3ec\ub10c\ud2b8\uc5d0\uc11c \uc0dd\uc131\ub41c \ud50c\ub78f\uc744 \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud50c\ub78f\uc744 \uc0dd\uc131\ud558\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")')," argument\ub85c \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uac12\uc744 \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4. 
\uc774 \ub54c \ud50c\ub78f\uc758 \ud615\ud0dc\ub294 html \ud3ec\ub9f7\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4.\n\ubcc0\ud658\ud558\ub294 \uacfc\uc815\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,p.kt)("p",null,"\ud30c\uc774\ud504\ub77c\uc778\uc73c\ub85c \uc791\uc131\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub429\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc774 \uc2a4\ud06c\ub9bd\ud2b8\ub97c \uc2e4\ud589\ud574\uc11c \ub098\uc628 ",(0,p.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),"\uc744 \ud655\uc778\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("details",null,(0,p.kt)("summary",null,"plot_pipeline.yaml"),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return 
file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Visualization\uc744 \ud074\ub9ad\ud569\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-5.png",src:t(2244).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"run-output"},"Run 
output"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-2.png",src:t(6266).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Run output\uc740 kubeflow\uc5d0\uc11c \uc9c0\uc815\ud55c \ud615\ud0dc\ub85c \uc0dd\uae34 Artifacts\ub97c \ubaa8\uc544\uc11c \ubcf4\uc5ec\uc8fc\ub294 \uacf3\uc774\uba70 \ud3c9\uac00 \uc9c0\ud45c(Metric)\ub97c \ubcf4\uc5ec\uc90d\ub2c8\ub2e4."),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c(Metric)\uc744 \ubcf4\uc5ec\uc8fc\uae30 \uc704\ud574\uc11c\ub294 ",(0,p.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument\uc5d0 \ubcf4\uc5ec\uc8fc\uace0 \uc2f6\uc740 \uc774\ub984\uacfc \uac12\uc744 json \ud615\ud0dc\ub85c \uc800\uc7a5\ud558\uba74 \ub429\ub2c8\ub2e4.\n\uc608\ub97c \ub4e4\uc5b4\uc11c \ub2e4\uc74c\uacfc \uac19\uc774 \uc791\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,p.kt)("p",null,"\ud3c9\uac00 \uc9c0\ud45c\ub97c \uc0dd\uc131\ud558\ub294 \ucef4\ud3ec\ub10c\ud2b8\ub97c ",(0,p.kt)("a",{parentName:"p",href:"/docs/1.0/kubeflow/basic-pipeline"},"\ud30c\uc774\ud504\ub77c\uc778"),"\uc5d0\uc11c \uc0dd\uc131\ud55c \ud30c\uc774\ud504\ub77c\uc778\uc5d0 \ucd94\uac00 \ud6c4 \uc2e4\ud589\ud574 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.\n\uc804\uccb4 \ud30c\uc774\ud504\ub77c\uc778\uc740 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,p.kt)("pre",null,(0,p.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,p.kt)("p",null,"\uc2e4\ud589 \ud6c4 Run Output\uc744 \ud074\ub9ad\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ub098\uc635\ub2c8\ub2e4."),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-4.png",src:t(1882).Z,width:"3360",height:"2100"})),(0,p.kt)("h2",{id:"config"},"Config"),(0,p.kt)("p",null,(0,p.kt)("img",{alt:"advanced-run-3.png",src:t(1487).Z,width:"3360",height:"2100"})),(0,p.kt)("p",null,"Config\uc5d0\uc11c\ub294 \ud30c\uc774\ud504\ub77c\uc778 Config\ub85c \uc785\ub825\ubc1b\uc740 \ubaa8\ub4e0 \uac12\uc744 \ud655\uc778\ud560 \uc218 
\uc788\uc2b5\ub2c8\ub2e4."))}d.isMDXComponent=!0},6394:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},6255:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},6266:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},1487:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},1882:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},2244:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file diff --git a/assets/js/facca37e.39b99df2.js b/assets/js/facca37e.a354bc59.js similarity index 99% rename from assets/js/facca37e.39b99df2.js rename to assets/js/facca37e.a354bc59.js index b4740460..76d3d396 100644 --- a/assets/js/facca37e.39b99df2.js +++ b/assets/js/facca37e.a354bc59.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1427],{3905:(t,e,n)=>{n.d(e,{Zo:()=>m,kt:()=>c});var r=n(7294);function a(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function l(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function o(t){for(var e=1;e=0||(a[n]=t[n]);return a}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(a[n]=t[n])}return a}var i=r.createContext({}),u=function(t){var e=r.useContext(i),n=e;return t&&(n="function"==typeof t?t(e):o(o({},e),t)),n},m=function(t){var e=u(t.components);return r.createElement(i.Provider,{value:e},t.children)},s="mdxType",d={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},k=r.forwardRef((function(t,e){var n=t.components,a=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),s=u(n),k=a,c=s["".concat(i,".").concat(k)]||s[k]||d[k]||l;return n?r.createElement(c,o(o({ref:e},m),{},{components:n})):r.createElement(c,o({ref:e},m))}));function c(t,e){var n=arguments,a=e&&e.mdxType;if("string"==typeof t||a){var l=n.length,o=new Array(l);o[0]=k;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[s]="string"==typeof t?t:a,o[1]=p;for(var u=2;u{n.r(e),n.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>p,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const l={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},o=void 0,p={unversionedId:"setup-kubernetes/intro",id:"setup-kubernetes/intro",title:"1. 
Introduction",description:"Setup Introduction",source:"@site/docs/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/docs/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/docs/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/docs/setup-kubernetes/kubernetes"}},i={},u=[{value:"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30",id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30",level:2},{value:"\uad6c\uc131 \uc694\uc18c",id:"\uad6c\uc131-\uc694\uc18c",level:2},{value:"\ud074\ub7ec\uc2a4\ud130",id:"\ud074\ub7ec\uc2a4\ud130",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"\ud074\ub77c\uc774\uc5b8\ud2b8",id:"\ud074\ub77c\uc774\uc5b8\ud2b8",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],m={toc:u},s="wrapper";function d(t){let{components:e,...n}=t;return(0,a.kt)(s,(0,r.Z)({},m,n,{components:e,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30"},"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30"),(0,a.kt)("p",null,"MLOps\ub97c \uacf5\ubd80\ud558\ub294 \ub370 \uc788\uc5b4\uc11c \uac00\uc7a5 \ud070 \uc7a5\ubcbd\uc740 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud574\ubcf4\uace0 \uc0ac\uc6a9\ud574\ubcf4\uae30\uac00 \uc5b4\ub835\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4. 
AWS, GCP \ub4f1\uc758 \ud37c\ube14\ub9ad \ud074\ub77c\uc6b0\ub4dc \ud639\uc740 Weight & Bias, neptune.ai \ub4f1\uc758 \uc0c1\uc6a9 \ud234\uc744 \uc0ac\uc6a9\ud574\ubcf4\uae30\uc5d0\ub294 \uacfc\uae08\uc5d0 \ub300\ud55c \ubd80\ub2f4\uc774 \uc874\uc7ac\ud558\uace0, \ucc98\uc74c\ubd80\ud130 \ubaa8\ub4e0 \ud658\uacbd\uc744 \ud63c\uc790\uc11c \uad6c\uc131\ud558\uae30\uc5d0\ub294 \uc5b4\ub514\uc11c\ubd80\ud130 \uc2dc\uc791\ud574\uc57c \ud560\uc9c0 \ub9c9\ub9c9\ud558\uac8c \ub290\uaef4\uc9c8 \uc218\ubc16\uc5d0 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7f0 \uc774\uc720\ub4e4\ub85c MLOps\ub97c \uc120\ub73b \uc2dc\uc791\ud574\ubcf4\uc9c0 \ubabb\ud558\uc2dc\ub294 \ubd84\ub4e4\uc744 \uc704\ud574, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub9cc \uc900\ube44\ub418\uc5b4 \uc788\ub2e4\uba74 MLOps \uc2dc\uc2a4\ud15c\uc744 \ubc11\ubc14\ub2e5\ubd80\ud130 \uad6c\ucd95\ud558\uace0 \uc0ac\uc6a9\ud574 \ubcfc \uc218 \uc788\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0, \uac00\uc0c1\uba38\uc2e0\uc744 \ud65c\uc6a9\ud558\uc5ec \ud658\uacbd\uc744 \uad6c\uc131\ud558\uae30"),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"Windows \ud639\uc740 Intel Mac\uc744 \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"p"},"\ubaa8\ub450\uc758 MLops")," \uc2e4\uc2b5\uc744 \uc9c4\ud589 \uc911\uc778 \ubd84\ub4e4\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Virtual Box"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"VMware")," \ub4f1\uc758 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub97c \uc774\uc6a9\ud558\uc5ec \uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uad8c\uc7a5 \uc0ac\uc591\uc744 \ub9de\ucdb0 \uac00\uc0c1 \uba38\uc2e0\uc744 \uc0dd\uc131\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.\n\ub610\ud55c, M1 Mac\uc744 \uc0ac\uc6a9\ud558\uc2dc\ub294 \ubd84\ub4e4\uc740 \uc791\uc131\uc77c(2022\ub144 2\uc6d4) \uae30\uc900\uc73c\ub85c\ub294 Virtual Box, VMware \ub294 \uc774\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. (",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"M1 Apple Silicone Mac\uc5d0 \ucd5c\uc801\ud654\ub41c macOS \uc571 \uc9c0\uc6d0 \ud655\uc778\ud558\uae30"),")\n\ub530\ub77c\uc11c, \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \uc774\uc6a9\ud574 \uc2e4\uc2b5\ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c\uba74, ",(0,a.kt)("a",{parentName:"p",href:"https://mac.getutm.app/"},"UTM , Virtual machines for Mac"),"\uc744 \uc124\uce58\ud558\uc5ec \uac00\uc0c1 \uba38\uc2e0\uc744 \uc774\uc6a9\ud574\uc8fc\uc138\uc694.\n(\uc571\uc2a4\ud1a0\uc5b4\uc5d0\uc11c \uad6c\ub9e4\ud558\uc5ec \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 \uc77c\uc885\uc758 Donation \uac1c\ub150\uc758 \ube44\uc6a9 \uc9c0\ubd88\uc785\ub2c8\ub2e4. 
\ubb34\ub8cc \ubc84\uc804\uacfc \uc790\ub3d9 \uc5c5\ub370\uc774\ud2b8 \uc815\ub3c4\uc758 \ucc28\uc774\uac00 \uc788\uc5b4, \ubb34\ub8cc\ubc84\uc804\uc744 \uc0ac\uc6a9\ud574\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4.)\n\ud574\ub2f9 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"Ubuntu 20.04.3 LTS")," \uc2e4\uc2b5 \uc6b4\uc601\uccb4\uc81c\ub97c \uc9c0\uc6d0\ud558\uace0 \uc788\uc5b4, M1 Mac\uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc218\ud589\ud558\ub294 \uac83\uc744 \uac00\ub2a5\ud558\uac8c \ud569\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\ud558\uc9c0\ub9cc ",(0,a.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \uc694\uc18c\ub4e4\uc744 \ubaa8\ub450 \uc0ac\uc6a9\ud574\ubcfc \uc218\ub294 \uc5c6\uae30\uc5d0, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4\ub9cc\uc744 \uc124\uce58\ud55c \ub4a4, \uc11c\ub85c \uc5f0\ub3d9\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 \ubd80\ubd84\uc744 \uc8fc\ub85c \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uc624\ud508\uc18c\uc2a4\uac00 \ud45c\uc900\uc744 \uc758\ubbf8\ud558\ub294 \uac83\uc740 \uc544\ub2c8\uba70, \uc5ec\ub7ec\ubd84\uc758 \uc0c1\ud669\uc5d0 \ub9de\uac8c \uc801\uc808\ud55c \ud234\uc744 \ucde8\uc0ac\uc120\ud0dd\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\uad6c\uc131-\uc694\uc18c"},"\uad6c\uc131 \uc694\uc18c"),(0,a.kt)("p",null,"\uc774 \uae00\uc5d0\uc11c \ub9cc\ub4e4\uc5b4 \ubcfc MLOps \uc2dc\uc2a4\ud15c\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uacfc \uac01 \ubc84\uc804\uc740 \uc544\ub798\uc640 \uac19\uc740 \ud658\uacbd\uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6d0\ud65c\ud55c \ud658\uacbd\uc5d0\uc11c \ud14c\uc2a4\ud2b8\ud558\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"\uc2f1\uae00 \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130 (\ud639\uc740 \ud074\ub7ec\uc2a4\ud130)")," \uc640 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub97c \ubd84\ub9ac\ud558\uc5ec \uc124\uba85\ud574 \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub294 \ub178\ud2b8\ubd81 \ud639\uc740 \ud074\ub7ec\uc2a4\ud130\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \uc678\uc758 \ud074\ub77c\uc774\uc5b8\ud2b8\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub2e4\ub978 \ub370\uc2a4\ud06c\ud1b1\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ub450 \ub300\uc758 \uba38\uc2e0\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub2e4\uba74 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \ub3d9\uc2dc\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud558\uc154\ub3c4 \uad1c\ucc2e\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"\ud074\ub7ec\uc2a4\ud130"},"\ud074\ub7ec\uc2a4\ud130"),(0,a.kt)("h4",{id:"1-software"},"1. 
Software"),(0,a.kt)("p",null,"\uc544\ub798\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc124\uce58\ud574\uc57c \ud560 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA-Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. Helm Chart"),(0,a.kt)("p",null,"\uc544\ub798\ub294 Helm\uc744 \uc774\uc6a9\ud574 \uc124\uce58\ub418\uc5b4\uc57c \ud560 \uc368\ub4dc\ud30c\ud2f0 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"\ud074\ub77c\uc774\uc5b8\ud2b8"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),(0,a.kt)("p",null,"\ud074\ub77c\uc774\uc5b8\ud2b8\ub294 MacOS (Intel CPU), Ubuntu 20.04 \uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"\ubaa8\ub450\uc758 MLOps\ub97c \uc124\uce58\ud560 \ud074\ub7ec\uc2a4\ud130\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0ac\uc591\uc744 \ub9cc\uc871\uc2dc\ud0a4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 Kubernetes \ubc0f Kubeflow \uc758 \uad8c\uc7a5 \uc0ac\uc591\uc5d0 \uc758\uc874\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU 
: 6 core"),(0,a.kt)("li",{parentName:"ul"},"RAM : 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK : 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU : NVIDIA GPU (Optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1427],{3905:(t,e,n)=>{n.d(e,{Zo:()=>m,kt:()=>c});var r=n(7294);function a(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function l(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);e&&(r=r.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,r)}return n}function o(t){for(var e=1;e=0||(a[n]=t[n]);return a}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(a[n]=t[n])}return a}var i=r.createContext({}),u=function(t){var e=r.useContext(i),n=e;return t&&(n="function"==typeof t?t(e):o(o({},e),t)),n},m=function(t){var e=u(t.components);return r.createElement(i.Provider,{value:e},t.children)},s="mdxType",d={inlineCode:"code",wrapper:function(t){var e=t.children;return r.createElement(r.Fragment,{},e)}},k=r.forwardRef((function(t,e){var n=t.components,a=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),s=u(n),k=a,c=s["".concat(i,".").concat(k)]||s[k]||d[k]||l;return n?r.createElement(c,o(o({ref:e},m),{},{components:n})):r.createElement(c,o({ref:e},m))}));function c(t,e){var n=arguments,a=e&&e.mdxType;if("string"==typeof t||a){var l=n.length,o=new Array(l);o[0]=k;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[s]="string"==typeof t?t:a,o[1]=p;for(var u=2;u{n.r(e),n.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>d,frontMatter:()=>l,metadata:()=>p,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const l={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},o=void 0,p={unversionedId:"setup-kubernetes/intro",id:"setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/docs/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/docs/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/docs/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/docs/setup-kubernetes/kubernetes"}},i={},u=[{value:"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30",id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30",level:2},{value:"\uad6c\uc131 \uc694\uc18c",id:"\uad6c\uc131-\uc694\uc18c",level:2},{value:"\ud074\ub7ec\uc2a4\ud130",id:"\ud074\ub7ec\uc2a4\ud130",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. 
Helm Chart",id:"2-helm-chart",level:4},{value:"\ud074\ub77c\uc774\uc5b8\ud2b8",id:"\ud074\ub77c\uc774\uc5b8\ud2b8",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],m={toc:u},s="wrapper";function d(t){let{components:e,...n}=t;return(0,a.kt)(s,(0,r.Z)({},m,n,{components:e,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"mlops-\uc2dc\uc2a4\ud15c-\uad6c\ucd95\ud574\ubcf4\uae30"},"MLOps \uc2dc\uc2a4\ud15c \uad6c\ucd95\ud574\ubcf4\uae30"),(0,a.kt)("p",null,"MLOps\ub97c \uacf5\ubd80\ud558\ub294 \ub370 \uc788\uc5b4\uc11c \uac00\uc7a5 \ud070 \uc7a5\ubcbd\uc740 MLOps \uc2dc\uc2a4\ud15c\uc744 \uad6c\uc131\ud574\ubcf4\uace0 \uc0ac\uc6a9\ud574\ubcf4\uae30\uac00 \uc5b4\ub835\ub2e4\ub294 \uc810\uc785\ub2c8\ub2e4. AWS, GCP \ub4f1\uc758 \ud37c\ube14\ub9ad \ud074\ub77c\uc6b0\ub4dc \ud639\uc740 Weight & Bias, neptune.ai \ub4f1\uc758 \uc0c1\uc6a9 \ud234\uc744 \uc0ac\uc6a9\ud574\ubcf4\uae30\uc5d0\ub294 \uacfc\uae08\uc5d0 \ub300\ud55c \ubd80\ub2f4\uc774 \uc874\uc7ac\ud558\uace0, \ucc98\uc74c\ubd80\ud130 \ubaa8\ub4e0 \ud658\uacbd\uc744 \ud63c\uc790\uc11c \uad6c\uc131\ud558\uae30\uc5d0\ub294 \uc5b4\ub514\uc11c\ubd80\ud130 \uc2dc\uc791\ud574\uc57c \ud560\uc9c0 \ub9c9\ub9c9\ud558\uac8c \ub290\uaef4\uc9c8 \uc218\ubc16\uc5d0 \uc5c6\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc774\ub7f0 \uc774\uc720\ub4e4\ub85c MLOps\ub97c \uc120\ub73b \uc2dc\uc791\ud574\ubcf4\uc9c0 \ubabb\ud558\uc2dc\ub294 \ubd84\ub4e4\uc744 \uc704\ud574, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub9cc \uc900\ube44\ub418\uc5b4 \uc788\ub2e4\uba74 MLOps \uc2dc\uc2a4\ud15c\uc744 \ubc11\ubc14\ub2e5\ubd80\ud130 \uad6c\ucd95\ud558\uace0 \uc0ac\uc6a9\ud574 \ubcfc \uc218 \uc788\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub294 \uacbd\uc6b0, \uac00\uc0c1\uba38\uc2e0\uc744 \ud65c\uc6a9\ud558\uc5ec \ud658\uacbd\uc744 \uad6c\uc131\ud558\uae30"),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"Windows \ud639\uc740 Intel Mac\uc744 \uc0ac\uc6a9\ud574 ",(0,a.kt)("inlineCode",{parentName:"p"},"\ubaa8\ub450\uc758 MLops")," \uc2e4\uc2b5\uc744 \uc9c4\ud589 \uc911\uc778 \ubd84\ub4e4\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"Virtual Box"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"VMware")," \ub4f1\uc758 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub97c \uc774\uc6a9\ud558\uc5ec \uc6b0\ubd84\ud22c \ub370\uc2a4\ud06c\ud0d1 \ud658\uacbd\uc744 \uc900\ube44\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774 \ub54c, \uad8c\uc7a5 \uc0ac\uc591\uc744 \ub9de\ucdb0 \uac00\uc0c1 \uba38\uc2e0\uc744 \uc0dd\uc131\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.\n\ub610\ud55c, M1 Mac\uc744 \uc0ac\uc6a9\ud558\uc2dc\ub294 \ubd84\ub4e4\uc740 \uc791\uc131\uc77c(2022\ub144 2\uc6d4) \uae30\uc900\uc73c\ub85c\ub294 Virtual Box, VMware \ub294 \uc774\uc6a9\ud560 \uc218 \uc5c6\uc2b5\ub2c8\ub2e4. 
(",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"M1 Apple Silicone Mac\uc5d0 \ucd5c\uc801\ud654\ub41c macOS \uc571 \uc9c0\uc6d0 \ud655\uc778\ud558\uae30"),")\n\ub530\ub77c\uc11c, \ud074\ub77c\uc6b0\ub4dc \ud658\uacbd\uc744 \uc774\uc6a9\ud574 \uc2e4\uc2b5\ud558\ub294 \uac83\uc774 \uc544\ub2c8\ub77c\uba74, ",(0,a.kt)("a",{parentName:"p",href:"https://mac.getutm.app/"},"UTM , Virtual machines for Mac"),"\uc744 \uc124\uce58\ud558\uc5ec \uac00\uc0c1 \uba38\uc2e0\uc744 \uc774\uc6a9\ud574\uc8fc\uc138\uc694.\n(\uc571\uc2a4\ud1a0\uc5b4\uc5d0\uc11c \uad6c\ub9e4\ud558\uc5ec \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\ub294 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 \uc77c\uc885\uc758 Donation \uac1c\ub150\uc758 \ube44\uc6a9 \uc9c0\ubd88\uc785\ub2c8\ub2e4. \ubb34\ub8cc \ubc84\uc804\uacfc \uc790\ub3d9 \uc5c5\ub370\uc774\ud2b8 \uc815\ub3c4\uc758 \ucc28\uc774\uac00 \uc788\uc5b4, \ubb34\ub8cc\ubc84\uc804\uc744 \uc0ac\uc6a9\ud574\ub3c4 \ubb34\ubc29\ud569\ub2c8\ub2e4.)\n\ud574\ub2f9 \uac00\uc0c1\uba38\uc2e0 \uc18c\ud504\ud2b8\uc6e8\uc5b4\ub294 ",(0,a.kt)("inlineCode",{parentName:"p"},"Ubuntu 20.04.3 LTS")," \uc2e4\uc2b5 \uc6b4\uc601\uccb4\uc81c\ub97c \uc9c0\uc6d0\ud558\uace0 \uc788\uc5b4, M1 Mac\uc5d0\uc11c \uc2e4\uc2b5\uc744 \uc218\ud589\ud558\ub294 \uac83\uc744 \uac00\ub2a5\ud558\uac8c \ud569\ub2c8\ub2e4.")),(0,a.kt)("p",null,"\ud558\uc9c0\ub9cc ",(0,a.kt)("a",{parentName:"p",href:"/docs/introduction/component"},"MLOps\uc758 \uad6c\uc131\uc694\uc18c"),"\uc5d0\uc11c \uc124\uba85\ud558\ub294 \uc694\uc18c\ub4e4\uc744 \ubaa8\ub450 \uc0ac\uc6a9\ud574\ubcfc \uc218\ub294 \uc5c6\uae30\uc5d0, ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 \ub300\ud45c\uc801\uc778 \uc624\ud508\uc18c\uc2a4\ub9cc\uc744 \uc124\uce58\ud55c \ub4a4, \uc11c\ub85c \uc5f0\ub3d9\ud558\uc5ec \uc0ac\uc6a9\ud558\ub294 \ubd80\ubd84\uc744 \uc8fc\ub85c \ub2e4\ub8f0 \uc608\uc815\uc785\ub2c8\ub2e4."),(0,a.kt)("p",null,(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c \uc124\uce58\ud558\ub294 \uc624\ud508\uc18c\uc2a4\uac00 \ud45c\uc900\uc744 \uc758\ubbf8\ud558\ub294 \uac83\uc740 \uc544\ub2c8\uba70, \uc5ec\ub7ec\ubd84\uc758 \uc0c1\ud669\uc5d0 \ub9de\uac8c \uc801\uc808\ud55c \ud234\uc744 \ucde8\uc0ac\uc120\ud0dd\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4."),(0,a.kt)("h2",{id:"\uad6c\uc131-\uc694\uc18c"},"\uad6c\uc131 \uc694\uc18c"),(0,a.kt)("p",null,"\uc774 \uae00\uc5d0\uc11c \ub9cc\ub4e4\uc5b4 \ubcfc MLOps \uc2dc\uc2a4\ud15c\uc758 \uad6c\uc131 \uc694\uc18c\ub4e4\uacfc \uac01 \ubc84\uc804\uc740 \uc544\ub798\uc640 \uac19\uc740 \ud658\uacbd\uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc6d0\ud65c\ud55c \ud658\uacbd\uc5d0\uc11c \ud14c\uc2a4\ud2b8\ud558\uae30 \uc704\ud574 ",(0,a.kt)("strong",{parentName:"p"},"\uc2f1\uae00 \ub178\ub4dc \ud074\ub7ec\uc2a4\ud130 (\ud639\uc740 \ud074\ub7ec\uc2a4\ud130)")," \uc640 ",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub97c \ubd84\ub9ac\ud558\uc5ec \uc124\uba85\ud574 \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub294 \uc6b0\ubd84\ud22c\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \uc758\ubbf8\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n",(0,a.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub294 \ub178\ud2b8\ubd81 \ud639\uc740 \ud074\ub7ec\uc2a4\ud130\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294 
\ub370\uc2a4\ud06c\ud1b1 \uc678\uc758 \ud074\ub77c\uc774\uc5b8\ud2b8\ub85c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub2e4\ub978 \ub370\uc2a4\ud06c\ud1b1\uc744 \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ud558\uc9c0\ub9cc \ub450 \ub300\uc758 \uba38\uc2e0\uc744 \uc900\ube44\ud560 \uc218 \uc5c6\ub2e4\uba74 \ub370\uc2a4\ud06c\ud1b1 \ud558\ub098\ub97c \ub3d9\uc2dc\uc5d0 \ud074\ub7ec\uc2a4\ud130\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc6a9\ub3c4\ub85c \uc0ac\uc6a9\ud558\uc154\ub3c4 \uad1c\ucc2e\uc2b5\ub2c8\ub2e4."),(0,a.kt)("h3",{id:"\ud074\ub7ec\uc2a4\ud130"},"\ud074\ub7ec\uc2a4\ud130"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"\uc544\ub798\ub294 \ud074\ub7ec\uc2a4\ud130\uc5d0 \uc124\uce58\ud574\uc57c \ud560 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA-Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"\uc544\ub798\ub294 Helm\uc744 \uc774\uc6a9\ud574 \uc124\uce58\ub418\uc5b4\uc57c \ud560 \uc368\ub4dc\ud30c\ud2f0 \uc18c\ud504\ud2b8\uc6e8\uc5b4 \ubaa9\ub85d\uc785\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"\ud074\ub77c\uc774\uc5b8\ud2b8"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),(0,a.kt)("p",null,"\ud074\ub77c\uc774\uc5b8\ud2b8\ub294 MacOS (Intel CPU), Ubuntu 20.04 \uc5d0\uc11c \uac80\uc99d\ub418\uc5c8\uc2b5\ub2c8\ub2e4."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"\ubaa8\ub450\uc758 MLOps\ub97c \uc124\uce58\ud560 \ud074\ub7ec\uc2a4\ud130\ub294 \ub2e4\uc74c\uacfc \uac19\uc740 \uc0ac\uc591\uc744 \ub9cc\uc871\uc2dc\ud0a4\ub294 \uac83\uc744 \uad8c\uc7a5\ud569\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\uc774\ub294 Kubernetes \ubc0f Kubeflow \uc758 \uad8c\uc7a5 \uc0ac\uc591\uc5d0 \uc758\uc874\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU : 6 core"),(0,a.kt)("li",{parentName:"ul"},"RAM : 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK : 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU : NVIDIA GPU (Optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fb6a8c4a.270cbf4d.js b/assets/js/fb6a8c4a.54e59c3f.js similarity index 99% rename from assets/js/fb6a8c4a.270cbf4d.js rename to assets/js/fb6a8c4a.54e59c3f.js index 5f9f176c..725609a2 100644 --- a/assets/js/fb6a8c4a.270cbf4d.js +++ b/assets/js/fb6a8c4a.54e59c3f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9229],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>m});var i=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function l(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof 
e?e(n):l(l({},n),e)),t},p=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,u=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=o(t),b=r,m=d["".concat(u,".").concat(b)]||d[b]||k[b]||a;return t?i.createElement(m,l(l({ref:n},p),{},{components:t})):i.createElement(m,l({ref:n},p))}));function m(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,l=new Array(a);l[0]=b;var s={};for(var u in n)hasOwnProperty.call(n,u)&&(s[u]=n[u]);s.originalType=e,s[d]="string"==typeof e?e:r,l[1]=s;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>l,default:()=>k,frontMatter:()=>a,metadata:()=>s,toc:()=>o});var i=t(7462),r=(t(7294),t(3905));const a={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/docs/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:o},d="wrapper";function k(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,i.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,r.kt)("p",null,"Minikube\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, v1.24.0 \ubc84\uc804\uc758 Minikube \ubc14\uc774\ub108\ub9ac\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,r.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\uc81c Minikube\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uad6c\ucd95\ud569\ub2c8\ub2e4.\nGPU \uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uacfc \ud074\ub7ec\uc2a4\ud130-\ud074\ub77c\uc774\uc5b8\ud2b8 \uac04 \ud1b5\uc2e0\uc744 \uac04\ud3b8\ud558\uac8c \uc218\ud589\ud558\uae30 \uc704\ud574, Minikube \ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc744 \ud65c\uc6a9\ud558\uc5ec \uc2e4\ud589\ud569\ub2c8\ub2e4. ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc740 root user \ub85c \uc2e4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"root user\ub85c \uc804\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"minikube start"),"\ub97c \uc218\ud589\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uad6c\ucd95\uc744 \uc9c4\ud589\ud569\ub2c8\ub2e4. 
Kubeflow\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud574, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc84\uc804\uc740 v1.21.7\ub85c \uc9c0\uc815\ud558\uc5ec \uad6c\ucd95\ud558\uba70 ",(0,r.kt)("inlineCode",{parentName:"p"},"--extra-config"),"\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,r.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,r.kt)("p",null,"Minikube\ub97c \uc124\uce58\ud558\uba74 Default\ub85c \uc124\uce58\ub418\ub294 addon\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4. \uc774 \uc911 \uc800\ud76c\uac00 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\uc744 addon\uc744 \ube44\ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,r.kt)("p",null,"\ubaa8\ub4e0 addon\uc774 \ube44\ud65c\uc131\ud654\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | 
disabled | kubernetes |\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,r.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud55c \ub3c4\uad6c\ub97c \uc124\uce58\ud569\ub2c8\ub2e4.\n",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0\uc5d0\ub294 root user\ub85c \ubaa8\ub4e0 \uc791\uc5c5\uc744 \uc9c4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, \uc6b0\uc120 kubernetes\uc758 \uad00\ub9ac\uc790 \uc778\uc99d \uc815\ubcf4\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub85c \uac00\uc838\uc635\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0\uc11c config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\nminikube kubectl -- config view --flatten\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uc815\ubcf4\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,r.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,r.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},".kube")," \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\nmkdir -p /home/$USER/.kube\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud574\ub2f9 \ud30c\uc77c\uc5d0 2. 
\uc5d0\uc11c \ucd9c\ub825\ub41c \uc815\ubcf4\ub97c \ubd99\uc5ec\ub123\uc740 \ub4a4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,r.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"helm"),(0,r.kt)("li",{parentName:"ul"},"kustomize"),(0,r.kt)("li",{parentName:"ul"},"CSI plugin"),(0,r.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,r.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9229],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>m});var i=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function l(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof e?e(n):l(l({},n),e)),t},p=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},d="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,u=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=o(t),b=r,m=d["".concat(u,".").concat(b)]||d[b]||k[b]||a;return t?i.createElement(m,l(l({ref:n},p),{},{components:t})):i.createElement(m,l({ref:n},p))}));function m(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,l=new Array(a);l[0]=b;var s={};for(var u in n)hasOwnProperty.call(n,u)&&(s[u]=n[u]);s.originalType=e,s[d]="string"==typeof e?e:r,l[1]=s;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>l,default:()=>k,frontMatter:()=>a,metadata:()=>s,toc:()=>o});var 
i=t(7462),r=(t(7294),t(3905));const a={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/docs/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5",id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5",id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5",level:2},{value:"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58",id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58",level:2},{value:"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:2}],p={toc:o},d="wrapper";function k(e){let{components:n,...t}=e;return(0,r.kt)(d,(0,i.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,r.kt)("p",null,"\ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c \uad6c\ucd95\ud558\uae30\uc5d0 \uc55e\uc11c, \ud544\uc694\ud55c \uad6c\uc131 \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite"),"\uc744 \ucc38\uace0\ud558\uc5ec Kubernetes\ub97c \uc124\uce58\ud558\uae30 \uc804\uc5d0 \ud544\uc694\ud55c \uc694\uc18c\ub4e4\uc744 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,r.kt)("p",null,"Minikube\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574, v1.24.0 \ubc84\uc804\uc758 Minikube \ubc14\uc774\ub108\ub9ac\ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,r.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uc5c8\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,r.kt)("h2",{id:"2-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub7ec\uc2a4\ud130-\uc14b\uc5c5"},"2. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\uc81c Minikube\ub97c \ud65c\uc6a9\ud574 \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130\uc5d0")," \uad6c\ucd95\ud569\ub2c8\ub2e4.\nGPU \uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uacfc \ud074\ub7ec\uc2a4\ud130-\ud074\ub77c\uc774\uc5b8\ud2b8 \uac04 \ud1b5\uc2e0\uc744 \uac04\ud3b8\ud558\uac8c \uc218\ud589\ud558\uae30 \uc704\ud574, Minikube \ub294 ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc744 \ud65c\uc6a9\ud558\uc5ec \uc2e4\ud589\ud569\ub2c8\ub2e4. ",(0,r.kt)("inlineCode",{parentName:"p"},"driver=none")," \uc635\uc158\uc740 root user \ub85c \uc2e4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,"root user\ub85c \uc804\ud658\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"minikube start"),"\ub97c \uc218\ud589\ud558\uc5ec \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub7ec\uc2a4\ud130 \uad6c\ucd95\uc744 \uc9c4\ud589\ud569\ub2c8\ub2e4. 
Kubeflow\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud574, \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ubc84\uc804\uc740 v1.21.7\ub85c \uc9c0\uc815\ud558\uc5ec \uad6c\ucd95\ud558\uba70 ",(0,r.kt)("inlineCode",{parentName:"p"},"--extra-config"),"\ub97c \ucd94\uac00\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,r.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,r.kt)("p",null,"Minikube\ub97c \uc124\uce58\ud558\uba74 Default\ub85c \uc124\uce58\ub418\ub294 addon\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4. \uc774 \uc911 \uc800\ud76c\uac00 \uc0ac\uc6a9\ud558\uc9c0 \uc54a\uc744 addon\uc744 \ube44\ud65c\uc131\ud654\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,r.kt)("p",null,"\ubaa8\ub4e0 addon\uc774 \ube44\ud65c\uc131\ud654\ub41c \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | 
disabled | kubernetes |\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,r.kt)("h2",{id:"3-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\ud074\ub77c\uc774\uc5b8\ud2b8-\uc14b\uc5c5"},"3. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \ud074\ub77c\uc774\uc5b8\ud2b8 \uc14b\uc5c5"),(0,r.kt)("p",null,"\uc774\ubc88\uc5d0\ub294 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc5d0 \ucfe0\ubc84\ub124\ud2f0\uc2a4\uc758 \uc6d0\ud65c\ud55c \uc0ac\uc6a9\uc744 \uc704\ud55c \ub3c4\uad6c\ub97c \uc124\uce58\ud569\ub2c8\ub2e4.\n",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub418\uc9c0 \uc54a\uc740 \uacbd\uc6b0\uc5d0\ub294 root user\ub85c \ubaa8\ub4e0 \uc791\uc5c5\uc744 \uc9c4\ud589\ud574\uc57c \ud568\uc5d0 \uc8fc\uc758\ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\uc640 ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130")," \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, \uc6b0\uc120 kubernetes\uc758 \uad00\ub9ac\uc790 \uc778\uc99d \uc815\ubcf4\ub97c ",(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8"),"\ub85c \uac00\uc838\uc635\ub2c8\ub2e4."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub7ec\uc2a4\ud130"),"\uc5d0\uc11c config\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\nminikube kubectl -- config view --flatten\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ub2e4\uc74c\uacfc \uac19\uc740 \uc815\ubcf4\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,r.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,r.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"")))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("strong",{parentName:"p"},"\ud074\ub77c\uc774\uc5b8\ud2b8")," \ub178\ub4dc\uc5d0\uc11c ",(0,r.kt)("inlineCode",{parentName:"p"},".kube")," \ud3f4\ub354\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\nmkdir -p /home/$USER/.kube\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\ud574\ub2f9 \ud30c\uc77c\uc5d0 2. 
\uc5d0\uc11c \ucd9c\ub825\ub41c \uc815\ubcf4\ub97c \ubd99\uc5ec\ub123\uc740 \ub4a4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,r.kt)("h2",{id:"4-\ucfe0\ubc84\ub124\ud2f0\uc2a4-\uae30\ubcf8-\ubaa8\ub4c8-\uc124\uce58"},"4. \ucfe0\ubc84\ub124\ud2f0\uc2a4 \uae30\ubcf8 \ubaa8\ub4c8 \uc124\uce58"),(0,r.kt)("p",null,(0,r.kt)("a",{parentName:"p",href:"/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules"),"\uc744 \ucc38\uace0\ud558\uc5ec \ub2e4\uc74c \ucef4\ud3ec\ub10c\ud2b8\ub4e4\uc744 \uc124\uce58\ud574 \uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"helm"),(0,r.kt)("li",{parentName:"ul"},"kustomize"),(0,r.kt)("li",{parentName:"ul"},"CSI plugin"),(0,r.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,r.kt)("h2",{id:"5-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"5. \uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,r.kt)("p",null,"\ucd5c\uc885\uc801\uc73c\ub85c node\uac00 Ready \uc778\uc9c0, OS, Docker, Kubernetes \ubc84\uc804\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,r.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ubcf4\uc774\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fb803674.1ede9e09.js b/assets/js/fb803674.9a691c48.js similarity index 99% rename from assets/js/fb803674.1ede9e09.js rename to assets/js/fb803674.9a691c48.js index 0f14c454..1c28481a 100644 --- a/assets/js/fb803674.1ede9e09.js +++ b/assets/js/fb803674.9a691c48.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8834],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>y});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=l,y=u["".concat(s,".").concat(c)]||u[c]||h[c]||r;return t?a.createElement(y,i(i({ref:n},v),{},{components:t})):a.createElement(y,i({ref:n},v))}));function y(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var 
r=t.length,i=new Array(r);i[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:l,i[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>h,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},i=void 0,o={unversionedId:"appendix/pyenv",id:"appendix/pyenv",title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",description:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",source:"@site/docs/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/docs/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/pyenv.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:1,frontMatter:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/docs/api-deployment/seldon-children"},next:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/appendix/metallb"}},s={},p=[{value:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd",level:2},{value:"pyenv \uc124\uce58",id:"pyenv-\uc124\uce58",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"\uc124\uce58 - macOS",id:"\uc124\uce58---macos",level:3},{value:"\uc124\uce58 - Ubuntu",id:"\uc124\uce58---ubuntu",level:3},{value:"pyenv \uc0ac\uc6a9",id:"pyenv-\uc0ac\uc6a9",level:2},{value:"Python \ubc84\uc804 \uc124\uce58",id:"python-\ubc84\uc804-\uc124\uce58",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654",id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,l.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd"},"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),(0,l.kt)("p",null,"Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\ub2e4 \ubcf4\uba74 \uc5ec\ub7ec \ubc84\uc804\uc758 Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\ub098, \uc5ec\ub7ec \ud504\ub85c\uc81d\ud2b8\ubcc4 \ud328\ud0a4\uc9c0 \ubc84\uc804\uc744 \ub530\ub85c \uad00\ub9ac\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\ucc98\ub7fc Python \ud658\uacbd \ud639\uc740 Python Package \ud658\uacbd\uc744 \uac00\uc0c1\ud654\ud558\uc5ec \uad00\ub9ac\ud558\ub294 \uac83\uc744 \uc27d\uac8c \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\ub85c\ub294 pyenv, conda, virtualenv, venv \ub4f1\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774 \uc911 ",(0,l.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv"),"\uc640 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),"\ub97c \uc124\uce58\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","pyenv\ub294 Python \ubc84\uc804\uc744 
\uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\uba70, pyenv-virtualenv\ub294 pyenv\uc758 plugin\uc73c\ub85c\uc368 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0 \ud658\uacbd\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc90d\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pyenv-\uc124\uce58"},"pyenv \uc124\uce58"),(0,l.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,l.kt)("p",null,"\uc6b4\uc601 \uccb4\uc81c\ubcc4\ub85c Prerequisites\uac00 \ub2e4\ub985\ub2c8\ub2e4. ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"\ub2e4\uc74c \ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc5ec \ud544\uc218 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"\uc124\uce58---macos"},"\uc124\uce58 - macOS"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"macOS\uc758 \uacbd\uc6b0 \uce74\ud0c8\ub9ac\ub098 \ubc84\uc804 \uc774\ud6c4 \uae30\ubcf8 shell\uc774 zsh\ub85c \ubcc0\uacbd\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 zsh\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain 
the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h3",{id:"\uc124\uce58---ubuntu"},"\uc124\uce58 - Ubuntu"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc774 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\n\uc911\ub7b5...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"\uae30\ubcf8 shell\ub85c bash shell\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4.\nbash\uc5d0\uc11c pyenv\uc640 pyenv-virtualenv \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \ubb38\uc790\uc5f4\uc744 \uc785\ub825\ud55c \ud6c4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,l.kt)("p",null,"shell\uc744 restart \ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv 
command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h2",{id:"pyenv-\uc0ac\uc6a9"},"pyenv \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"python-\ubc84\uc804-\uc124\uce58"},"Python \ubc84\uc804 \uc124\uce58"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv install ")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc744 \uc124\uce58\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc608\uc2dc\ub85c kubeflow\uc5d0\uc11c \uae30\ubcf8\uc73c\ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc774\uc36c 3.7.12 \ubc84\uc804\uc744 \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131"},"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv <\uac00\uc0c1\ud658\uacbd-\uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc758 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c Python 3.7.12 \ubc84\uc804\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python 
\uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9"},"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv activate <\uac00\uc0c1\ud658\uacbd \uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc704\uc640 \uac19\uc740 \ubc29\uc2dd\uc73c\ub85c \uc0dd\uc131\ud55c \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ud604\uc7ac \uac00\uc0c1\ud658\uacbd\uc758 \uc815\ubcf4\uac00 shell\uc758 \ub9e8 \uc55e\uc5d0 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654"},"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"source deactivate")," \uba85\ub839\uc744 \ud1b5\ud574 \ud604\uc7ac \uc0ac\uc6a9 \uc911\uc778 \uac00\uc0c1\ud658\uacbd\uc744 \ube44\ud65c\uc131\ud654\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8834],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>y});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=l,y=u["".concat(s,".").concat(c)]||u[c]||h[c]||r;return t?a.createElement(y,i(i({ref:n},v),{},{components:t})):a.createElement(y,i({ref:n},v))}));function y(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,i=new Array(r);i[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:l,i[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>h,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=t(7462),l=(t(7294),t(3905));const r={title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},i=void 0,o={unversionedId:"appendix/pyenv",id:"appendix/pyenv",title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",description:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",source:"@site/docs/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/docs/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/pyenv.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:1,frontMatter:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/docs/api-deployment/seldon-children"},next:{title:"2. 
Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",permalink:"/docs/appendix/metallb"}},s={},p=[{value:"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd",id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd",level:2},{value:"pyenv \uc124\uce58",id:"pyenv-\uc124\uce58",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"\uc124\uce58 - macOS",id:"\uc124\uce58---macos",level:3},{value:"\uc124\uce58 - Ubuntu",id:"\uc124\uce58---ubuntu",level:3},{value:"pyenv \uc0ac\uc6a9",id:"pyenv-\uc0ac\uc6a9",level:2},{value:"Python \ubc84\uc804 \uc124\uce58",id:"python-\ubc84\uc804-\uc124\uce58",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9",id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9",level:3},{value:"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654",id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,l.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"\ud30c\uc774\uc36c-\uac00\uc0c1\ud658\uacbd"},"\ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd"),(0,l.kt)("p",null,"Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\ub2e4 \ubcf4\uba74 \uc5ec\ub7ec \ubc84\uc804\uc758 Python \ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\ub098, \uc5ec\ub7ec \ud504\ub85c\uc81d\ud2b8\ubcc4 \ud328\ud0a4\uc9c0 \ubc84\uc804\uc744 \ub530\ub85c \uad00\ub9ac\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0\uac00 \ubc1c\uc0dd\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774\ucc98\ub7fc Python \ud658\uacbd \ud639\uc740 Python Package \ud658\uacbd\uc744 \uac00\uc0c1\ud654\ud558\uc5ec \uad00\ub9ac\ud558\ub294 \uac83\uc744 \uc27d\uac8c \ub3c4\uc640\uc8fc\ub294 \ub3c4\uad6c\ub85c\ub294 pyenv, conda, virtualenv, venv \ub4f1\uc774 \uc874\uc7ac\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc774 \uc911 ",(0,l.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps"),"\uc5d0\uc11c\ub294 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv"),"\uc640 ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),"\ub97c \uc124\uce58\ud558\ub294 \ubc29\ubc95\uc744 \ub2e4\ub8f9\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","pyenv\ub294 Python \ubc84\uc804\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc8fc\uba70, pyenv-virtualenv\ub294 pyenv\uc758 plugin\uc73c\ub85c\uc368 \ud30c\uc774\uc36c \ud328\ud0a4\uc9c0 \ud658\uacbd\uc744 \uad00\ub9ac\ud558\ub294 \uac83\uc744 \ub3c4\uc640\uc90d\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"pyenv-\uc124\uce58"},"pyenv \uc124\uce58"),(0,l.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,l.kt)("p",null,"\uc6b4\uc601 \uccb4\uc81c\ubcc4\ub85c Prerequisites\uac00 \ub2e4\ub985\ub2c8\ub2e4. 
",(0,l.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"\ub2e4\uc74c \ud398\uc774\uc9c0"),"\ub97c \ucc38\uace0\ud558\uc5ec \ud544\uc218 \ud328\ud0a4\uc9c0\ub4e4\uc744 \uc124\uce58\ud574\uc8fc\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"\uc124\uce58---macos"},"\uc124\uce58 - macOS"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"macOS\uc758 \uacbd\uc6b0 \uce74\ud0c8\ub9ac\ub098 \ubc84\uc804 \uc774\ud6c4 \uae30\ubcf8 shell\uc774 zsh\ub85c \ubcc0\uacbd\ub418\uc5c8\uae30 \ub54c\ubb38\uc5d0 zsh\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h3",{id:"\uc124\uce58---ubuntu"},"\uc124\uce58 - Ubuntu"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"pyenv, pyenv-virtualenv \uc124\uce58")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | 
bash\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \ub0b4\uc6a9\uc774 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\n\uc911\ub7b5...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},"pyenv \uc124\uc815")),(0,l.kt)("p",null,"\uae30\ubcf8 shell\ub85c bash shell\uc744 \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0\ub97c \uac00\uc815\ud558\uc600\uc2b5\ub2c8\ub2e4.\nbash\uc5d0\uc11c pyenv\uc640 pyenv-virtualenv \ub97c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub3c4\ub85d \uc124\uc815\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,l.kt)("p",null,"\ub2e4\uc74c \ubb38\uc790\uc5f4\uc744 \uc785\ub825\ud55c \ud6c4 \uc800\uc7a5\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,l.kt)("p",null,"shell\uc744 restart \ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,l.kt)("p",null,"pyenv \uba85\ub839\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub418\uba74 \uc815\uc0c1\uc801\uc73c\ub85c \uc124\uc815\ub41c \uac83\uc744 \uc758\ubbf8\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a 
specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,l.kt)("h2",{id:"pyenv-\uc0ac\uc6a9"},"pyenv \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"python-\ubc84\uc804-\uc124\uce58"},"Python \ubc84\uc804 \uc124\uce58"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv install ")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc744 \uc124\uce58\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n\uc774\ubc88 \ud398\uc774\uc9c0\uc5d0\uc11c\ub294 \uc608\uc2dc\ub85c kubeflow\uc5d0\uc11c \uae30\ubcf8\uc73c\ub85c \uc0ac\uc6a9\ud558\ub294 \ud30c\uc774\uc36c 3.7.12 \ubc84\uc804\uc744 \uc124\uce58\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc124\uce58\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc740 \uba54\uc2dc\uc9c0\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0dd\uc131"},"Python \uac00\uc0c1\ud658\uacbd \uc0dd\uc131"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv <\uac00\uc0c1\ud658\uacbd-\uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc6d0\ud558\ub294 \ud30c\uc774\uc36c \ubc84\uc804\uc758 \ud30c\uc774\uc36c \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c Python 3.7.12 \ubc84\uc804\uc758 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0dd\uc131\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already 
satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\uc0ac\uc6a9"},"Python \uac00\uc0c1\ud658\uacbd \uc0ac\uc6a9"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"pyenv activate <\uac00\uc0c1\ud658\uacbd \uc774\ub984>")," \uba85\ub839\uc744 \ud1b5\ud574 \uc704\uc640 \uac19\uc740 \ubc29\uc2dd\uc73c\ub85c \uc0dd\uc131\ud55c \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"\uc608\uc2dc\ub85c\ub294 ",(0,l.kt)("inlineCode",{parentName:"p"},"demo"),"\ub77c\ub294 \uc774\ub984\uc758 Python \uac00\uc0c1\ud658\uacbd\uc744 \uc0ac\uc6a9\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,l.kt)("p",null,"\ub2e4\uc74c\uacfc \uac19\uc774 \ud604\uc7ac \uac00\uc0c1\ud658\uacbd\uc758 \uc815\ubcf4\uac00 shell\uc758 \ub9e8 \uc55e\uc5d0 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,l.kt)("h3",{id:"python-\uac00\uc0c1\ud658\uacbd-\ube44\ud65c\uc131\ud654"},"Python \uac00\uc0c1\ud658\uacbd \ube44\ud65c\uc131\ud654"),(0,l.kt)("p",null,(0,l.kt)("inlineCode",{parentName:"p"},"source deactivate")," \uba85\ub839\uc744 \ud1b5\ud574 \ud604\uc7ac \uc0ac\uc6a9 \uc911\uc778 \uac00\uc0c1\ud658\uacbd\uc744 \ube44\ud65c\uc131\ud654\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,l.kt)("p",null," Before"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,l.kt)("p",null," After"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/fc8ffda8.d92bd484.js b/assets/js/fc8ffda8.8a53f4cb.js similarity index 99% rename from assets/js/fc8ffda8.d92bd484.js rename to assets/js/fc8ffda8.8a53f4cb.js index 6125d21d..a34d91f1 100644 --- a/assets/js/fc8ffda8.d92bd484.js +++ b/assets/js/fc8ffda8.8a53f4cb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[860],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>k});var t=n(7294);function l(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a=0||(l[n]=e[n]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},c=function(e){var a=p(e.components);return 
t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},d=t.forwardRef((function(e,a){var n=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(n),d=l,k=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return n?t.createElement(k,s(s({ref:a},c),{},{components:n})):t.createElement(k,s({ref:a},c))}));function k(e,a){var n=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=n.length,s=new Array(r);s[0]=d;var o={};for(var i in a)hasOwnProperty.call(a,i)&&(o[i]=a[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var t=n(7462),l=(n(7294),n(3905));const r={title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},s=void 0,o={unversionedId:"appendix/metallb",id:"version-1.0/appendix/metallb",title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",description:"MetalLB\ub780?",source:"@site/versioned_docs/version-1.0/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/docs/1.0/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/metallb.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"2023\ub144 7\uc6d4 11\uc77c",sidebarPosition:2,frontMatter:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/1.0/appendix/pyenv"},next:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",permalink:"/docs/1.0/further-readings/info"}},i={},p=[{value:"MetalLB\ub780?",id:"metallb\ub780",level:2},{value:"\uc694\uad6c\uc0ac\ud56d",id:"\uc694\uad6c\uc0ac\ud56d",level:2},{value:"MetalLB \uc124\uce58",id:"metallb-\uc124\uce58",level:2},{value:"Preparation",id:"preparation",level:3},{value:"\uc124\uce58 - Manifest",id:"\uc124\uce58---manifest",level:3},{value:"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4.",id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4",level:4},{value:"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"MetalLB \uc0ac\uc6a9",id:"metallb-\uc0ac\uc6a9",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},m="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(m,(0,t.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"metallb\ub780"},"MetalLB\ub780?"),(0,l.kt)("p",null,"Kubernetes \uc0ac\uc6a9 \uc2dc AWS, GCP, Azure \uc640 \uac19\uc740 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc5d0\uc11c\ub294 \uc790\uccb4\uc801\uc73c\ub85c \ub85c\ub4dc \ubca8\ub7f0\uc11c(Load Balancer)\ub97c \uc81c\uacf5\ud574 \uc8fc\uc9c0\ub9cc, \uc628\ud504\ub808\ubbf8\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ubaa8\ub4c8\uc744 \ucd94\uac00\uc801\uc73c\ub85c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB"),"\ub294 \ubca0\uc5b4\uba54\ud0c8 \ud658\uacbd\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc11c\ub97c \uc81c\uacf5\ud558\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub85c\uc81d\ud2b8 \uc785\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"\uc694\uad6c\uc0ac\ud56d"},"\uc694\uad6c\uc0ac\ud56d"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"\uc694\uad6c \uc0ac\ud56d"),(0,l.kt)("th",{parentName:"tr",align:null},"\ubc84\uc804 \ubc0f \ub0b4\uc6a9"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"\ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc774 \uc5c6\ub294 >= v1.13.0")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"\ud638\ud658\uac00\ub2a5\ud55c \ub124\ud2b8\uc6cc\ud06c CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 \uc8fc\uc18c"),(0,l.kt)("td",{parentName:"tr",align:null},"MetalLB \ubc30\ud3ec\uc5d0 \uc0ac\uc6a9")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0"),(0,l.kt)("td",{parentName:"tr",align:null},"BGP \uae30\ub2a5\uc744 \uc9c0\uc6d0\ud558\ub294 \ud558\ub098 \uc774\uc0c1\uc758 \ub77c\uc6b0\ud130")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"\ub178\ub4dc \uac04 \ud3ec\ud2b8 TCP/UDP 7946 \uc624\ud508"),(0,l.kt)("td",{parentName:"tr",align:null},"memberlist \uc694\uad6c \uc0ac\ud56d")))),(0,l.kt)("h2",{id:"metallb-\uc124\uce58"},"MetalLB \uc124\uce58"),(0,l.kt)("h3",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"IPVS \ubaa8\ub4dc\uc5d0\uc11c kube-proxy\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0 Kubernetes v1.14.2 \uc774\ud6c4\ubd80\ud130\ub294 \uc5c4\uaca9\ud55c ARP(strictARP) 
\ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud558\ub3c4\ub85d \uc124\uc815\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kube-router\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc5c4\uaca9\ud55c ARP\ub97c \ud65c\uc131\ud654\ud558\ubbc0\ub85c \uc11c\ube44\uc2a4 \ud504\ub85d\uc2dc\ub85c \uc0ac\uc6a9\ud560 \uacbd\uc6b0\uc5d0\ub294 \uc774 \uae30\ub2a5\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc5c4\uaca9\ud55c ARP \ubaa8\ub4dc\ub97c \uc801\uc6a9\ud558\uae30\uc5d0 \uc55e\uc11c, \ud604\uc7ac \ubaa8\ub4dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"strictARP: false \uac00 \ucd9c\ub825\ub418\ub294 \uacbd\uc6b0 \ub2e4\uc74c\uc744 \uc2e4\ud589\ud558\uc5ec strictARP: true\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4.\n(strictARP: true\uac00 \uc774\ubbf8 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \uc218\ud589\ud558\uc9c0 \uc54a\uc73c\uc154\ub3c4 \ub429\ub2c8\ub2e4.)"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"\uc124\uce58---manifest"},"\uc124\uce58 - Manifest"),(0,l.kt)("h4",{id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4"},"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,l.kt)("p",null,"metallb-system namespace \uc758 2 \uac1c\uc758 pod \uc774 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"\ub9e4\ub2c8\ud398\uc2a4\ud2b8\uc758 \uad6c\uc131 \uc694\uc18c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"deployment \ub85c \ubc30\ud3ec\ub418\uba70, \ub85c\ub4dc \ubca8\ub7f0\uc2f1\uc744 \uc218\ud589\ud560 external IP \uc8fc\uc18c\uc758 \ud560\ub2f9\uc744 \ucc98\ub9ac\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"daemonset \ud615\ud0dc\ub85c \ubc30\ud3ec\ub418\uba70, \uc678\ubd80 \ud2b8\ub798\ud53d\uacfc \uc11c\ube44\uc2a4\ub97c \uc5f0\uacb0\ud574 \ub124\ud2b8\uc6cc\ud06c \ud1b5\uc2e0\uc774 \uac00\ub2a5\ud558\ub3c4\ub85d \uad6c\uc131\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4.")))),(0,l.kt)("p",null,"\uc11c\ube44\uc2a4\uc5d0\ub294 \ucee8\ud2b8\ub864\ub7ec \ubc0f \uc2a4\ud53c\ucee4\uc640 \uad6c\uc131 \uc694\uc18c\uac00 \uc791\ub3d9\ud558\ub294 \ub370 \ud544\uc694\ud55c RBAC \uc0ac\uc6a9 \uad8c\ud55c\uc774 \ud3ec\ud568\ub429\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"MetalLB \uc758 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uc815\ucc45 \uc124\uc815\uc740 \uad00\ub828 \uc124\uc815 \uc815\ubcf4\ub97c \ub2f4\uc740 configmap \uc744 \ubc30\ud3ec\ud558\uc5ec \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"MetalLB \uc5d0\uc11c \uad6c\uc131\ud560 \uc218 \uc788\ub294 \ubaa8\ub4dc\ub85c\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 \ubaa8\ub4dc")),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP \ubaa8\ub4dc"))),(0,l.kt)("p",null,"\uc5ec\uae30\uc5d0\uc11c\ub294 Layer 2 \ubaa8\ub4dc\ub85c \uc9c4\ud589\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"Layer 2 \ubaa8\ub4dc\ub294 \uac04\ub2e8\ud558\uac8c \uc0ac\uc6a9\ud560 IP \uc8fc\uc18c\uc758 \ub300\uc5ed\ub9cc \uc124\uc815\ud558\uba74 \ub429\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Layer 2 \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc6cc\ucee4 \ub178\ub4dc\uc758 \ub124\ud2b8\uc6cc\ud06c \uc778\ud130\ud398\uc774\uc2a4\uc5d0 IP\ub97c \ubc14\uc778\ub529 \ud558\uc9c0 \uc54a\uc544\ub3c4 \ub418\ub294\ub370 \ub85c\uceec \ub124\ud2b8\uc6cc\ud06c\uc758 ARP \uc694\uccad\uc5d0 \uc9c1\uc811 \uc751\ub2f5\ud558\uc5ec \ucef4\ud4e8\ud130\uc758 MAC\uc8fc\uc18c\ub97c \ud074\ub77c\uc774\uc5b8\ud2b8\uc5d0 \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc73c\ub85c \uc791\ub3d9\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub2e4\uc74c ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," \ud30c\uc77c\uc740 MetalLB \uac00 192.168.35.100 ~ 192.168.35.110\uc758 IP\uc5d0 \ub300\ud55c \uc81c\uc5b4 \uad8c\ud55c\uc744 \uc81c\uacf5\ud558\uace0 Layer 2 \ubaa8\ub4dc\ub97c \uad6c\uc131\ud558\ub294 \uc124\uc815\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, 192.168.35.100 ~ 192.168.35.110 \ub300\uc5ed\uc774 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc640 \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc \ubaa8\ub450 \uc811\uadfc \uac00\ub2a5\ud55c \ub300\uc5ed\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uc815\uc744 \uc801\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"metallb-\uc0ac\uc6a9"},"MetalLB \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 kubeflow\uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 istio-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 istio-ingressgateway \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer"),"\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n 
clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100")," \uc778 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:n(5556).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 minio \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 kubeflow \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 minio-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n 
kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:n(6589).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 mlflow \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 mlflow-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 mlflow-server-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n 
mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:n(5864).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 Grafana \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 seldon-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 seldon-core-analytics-grafana \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 
\ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:n(2291).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},2291:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},5556:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},6589:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},5864:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[860],{3905:(e,a,n)=>{n.d(a,{Zo:()=>c,kt:()=>k});var t=n(7294);function l(e,a,n){return a in e?Object.defineProperty(e,a,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[a]=n,e}function r(e,a){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);a&&(t=t.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),n.push.apply(n,t)}return n}function s(e){for(var a=1;a=0||(l[n]=e[n]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(l[n]=e[n])}return l}var 
i=t.createContext({}),p=function(e){var a=t.useContext(i),n=a;return e&&(n="function"==typeof e?e(a):s(s({},a),e)),n},c=function(e){var a=p(e.components);return t.createElement(i.Provider,{value:a},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return t.createElement(t.Fragment,{},a)}},d=t.forwardRef((function(e,a){var n=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(n),d=l,k=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return n?t.createElement(k,s(s({ref:a},c),{},{components:n})):t.createElement(k,s({ref:a},c))}));function k(e,a){var n=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=n.length,s=new Array(r);s[0]=d;var o={};for(var i in a)hasOwnProperty.call(a,i)&&(o[i]=a[i]);o.originalType=e,o[m]="string"==typeof e?e:l,s[1]=o;for(var p=2;p{n.r(a),n.d(a,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var t=n(7462),l=(n(7294),n(3905));const r={title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},s=void 0,o={unversionedId:"appendix/metallb",id:"version-1.0/appendix/metallb",title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",description:"MetalLB\ub780?",source:"@site/versioned_docs/version-1.0/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/docs/1.0/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/metallb.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"2023\ub144 8\uc6d4 18\uc77c",sidebarPosition:2,frontMatter:{title:"2. Bare Metal \ud074\ub7ec\uc2a4\ud130\uc6a9 load balancer metallb \uc124\uce58",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Python \uac00\uc0c1\ud658\uacbd \uc124\uce58",permalink:"/docs/1.0/appendix/pyenv"},next:{title:"\ub2e4\ub8e8\uc9c0 \ubabb\ud55c \uac83\ub4e4",permalink:"/docs/1.0/further-readings/info"}},i={},p=[{value:"MetalLB\ub780?",id:"metallb\ub780",level:2},{value:"\uc694\uad6c\uc0ac\ud56d",id:"\uc694\uad6c\uc0ac\ud56d",level:2},{value:"MetalLB \uc124\uce58",id:"metallb-\uc124\uce58",level:2},{value:"Preparation",id:"preparation",level:3},{value:"\uc124\uce58 - Manifest",id:"\uc124\uce58---manifest",level:3},{value:"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4.",id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4",level:4},{value:"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778",id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"MetalLB \uc0ac\uc6a9",id:"metallb-\uc0ac\uc6a9",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},m="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(m,(0,t.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"metallb\ub780"},"MetalLB\ub780?"),(0,l.kt)("p",null,"Kubernetes \uc0ac\uc6a9 \uc2dc AWS, GCP, Azure \uc640 \uac19\uc740 \ud074\ub77c\uc6b0\ub4dc \ud50c\ub7ab\ud3fc\uc5d0\uc11c\ub294 \uc790\uccb4\uc801\uc73c\ub85c \ub85c\ub4dc \ubca8\ub7f0\uc11c(Load Balancer)\ub97c \uc81c\uacf5\ud574 \uc8fc\uc9c0\ub9cc, \uc628\ud504\ub808\ubbf8\uc2a4 \ud074\ub7ec\uc2a4\ud130\uc5d0\uc11c\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ud558\ub294 \ubaa8\ub4c8\uc744 \ucd94\uac00\uc801\uc73c\ub85c \uc124\uce58\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB"),"\ub294 \ubca0\uc5b4\uba54\ud0c8 \ud658\uacbd\uc5d0\uc11c \uc0ac\uc6a9\ud560 \uc218 \uc788\ub294 \ub85c\ub4dc \ubca8\ub7f0\uc11c\ub97c \uc81c\uacf5\ud558\ub294 \uc624\ud508\uc18c\uc2a4 \ud504\ub85c\uc81d\ud2b8 \uc785\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"\uc694\uad6c\uc0ac\ud56d"},"\uc694\uad6c\uc0ac\ud56d"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"\uc694\uad6c \uc0ac\ud56d"),(0,l.kt)("th",{parentName:"tr",align:null},"\ubc84\uc804 \ubc0f \ub0b4\uc6a9"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"\ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc774 \uc5c6\ub294 >= v1.13.0")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"\ud638\ud658\uac00\ub2a5\ud55c \ub124\ud2b8\uc6cc\ud06c CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 \uc8fc\uc18c"),(0,l.kt)("td",{parentName:"tr",align:null},"MetalLB \ubc30\ud3ec\uc5d0 \uc0ac\uc6a9")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0"),(0,l.kt)("td",{parentName:"tr",align:null},"BGP \uae30\ub2a5\uc744 \uc9c0\uc6d0\ud558\ub294 \ud558\ub098 \uc774\uc0c1\uc758 \ub77c\uc6b0\ud130")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"\ub178\ub4dc \uac04 \ud3ec\ud2b8 TCP/UDP 7946 \uc624\ud508"),(0,l.kt)("td",{parentName:"tr",align:null},"memberlist \uc694\uad6c \uc0ac\ud56d")))),(0,l.kt)("h2",{id:"metallb-\uc124\uce58"},"MetalLB \uc124\uce58"),(0,l.kt)("h3",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"IPVS \ubaa8\ub4dc\uc5d0\uc11c kube-proxy\ub97c \uc0ac\uc6a9\ud558\ub294 \uacbd\uc6b0 Kubernetes v1.14.2 \uc774\ud6c4\ubd80\ud130\ub294 \uc5c4\uaca9\ud55c ARP(strictARP) 
\ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud558\ub3c4\ub85d \uc124\uc815\ud574\uc57c \ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Kube-router\ub294 \uae30\ubcf8\uc801\uc73c\ub85c \uc5c4\uaca9\ud55c ARP\ub97c \ud65c\uc131\ud654\ud558\ubbc0\ub85c \uc11c\ube44\uc2a4 \ud504\ub85d\uc2dc\ub85c \uc0ac\uc6a9\ud560 \uacbd\uc6b0\uc5d0\ub294 \uc774 \uae30\ub2a5\uc774 \ud544\uc694\ud558\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\uc5c4\uaca9\ud55c ARP \ubaa8\ub4dc\ub97c \uc801\uc6a9\ud558\uae30\uc5d0 \uc55e\uc11c, \ud604\uc7ac \ubaa8\ub4dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"strictARP: false \uac00 \ucd9c\ub825\ub418\ub294 \uacbd\uc6b0 \ub2e4\uc74c\uc744 \uc2e4\ud589\ud558\uc5ec strictARP: true\ub85c \ubcc0\uacbd\ud569\ub2c8\ub2e4.\n(strictARP: true\uac00 \uc774\ubbf8 \ucd9c\ub825\ub41c\ub2e4\uba74 \ub2e4\uc74c \ucee4\ub9e8\ub4dc\ub97c \uc218\ud589\ud558\uc9c0 \uc54a\uc73c\uc154\ub3c4 \ub429\ub2c8\ub2e4.)"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \uc218\ud589\ub418\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"\uc124\uce58---manifest"},"\uc124\uce58 - Manifest"),(0,l.kt)("h4",{id:"1-metallb-\ub97c-\uc124\uce58\ud569\ub2c8\ub2e4"},"1. MetalLB \ub97c \uc124\uce58\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-\uc815\uc0c1-\uc124\uce58-\ud655\uc778"},"2. 
\uc815\uc0c1 \uc124\uce58 \ud655\uc778"),(0,l.kt)("p",null,"metallb-system namespace \uc758 2 \uac1c\uc758 pod \uc774 \ubaa8\ub450 Running \uc774 \ub420 \ub54c\uae4c\uc9c0 \uae30\ub2e4\ub9bd\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"\ubaa8\ub450 Running \uc774 \ub418\uba74 \ub2e4\uc74c\uacfc \ube44\uc2b7\ud55c \uacb0\uacfc\uac00 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"\ub9e4\ub2c8\ud398\uc2a4\ud2b8\uc758 \uad6c\uc131 \uc694\uc18c\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"deployment \ub85c \ubc30\ud3ec\ub418\uba70, \ub85c\ub4dc \ubca8\ub7f0\uc2f1\uc744 \uc218\ud589\ud560 external IP \uc8fc\uc18c\uc758 \ud560\ub2f9\uc744 \ucc98\ub9ac\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"daemonset \ud615\ud0dc\ub85c \ubc30\ud3ec\ub418\uba70, \uc678\ubd80 \ud2b8\ub798\ud53d\uacfc \uc11c\ube44\uc2a4\ub97c \uc5f0\uacb0\ud574 \ub124\ud2b8\uc6cc\ud06c \ud1b5\uc2e0\uc774 \uac00\ub2a5\ud558\ub3c4\ub85d \uad6c\uc131\ud558\ub294 \uc5ed\ud560\uc744 \ub2f4\ub2f9\ud569\ub2c8\ub2e4.")))),(0,l.kt)("p",null,"\uc11c\ube44\uc2a4\uc5d0\ub294 \ucee8\ud2b8\ub864\ub7ec \ubc0f \uc2a4\ud53c\ucee4\uc640 \uad6c\uc131 \uc694\uc18c\uac00 \uc791\ub3d9\ud558\ub294 \ub370 \ud544\uc694\ud55c RBAC \uc0ac\uc6a9 \uad8c\ud55c\uc774 \ud3ec\ud568\ub429\ub2c8\ub2e4."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"MetalLB \uc758 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uc815\ucc45 \uc124\uc815\uc740 \uad00\ub828 \uc124\uc815 \uc815\ubcf4\ub97c \ub2f4\uc740 configmap \uc744 \ubc30\ud3ec\ud558\uc5ec \uc124\uc815\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("p",null,"MetalLB \uc5d0\uc11c \uad6c\uc131\ud560 \uc218 \uc788\ub294 \ubaa8\ub4dc\ub85c\ub294 \ub2e4\uc74c\uacfc \uac19\uc774 2\uac00\uc9c0\uac00 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 \ubaa8\ub4dc")),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP \ubaa8\ub4dc"))),(0,l.kt)("p",null,"\uc5ec\uae30\uc5d0\uc11c\ub294 Layer 2 \ubaa8\ub4dc\ub85c \uc9c4\ud589\ud558\uaca0\uc2b5\ub2c8\ub2e4."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"Layer 2 \ubaa8\ub4dc\ub294 \uac04\ub2e8\ud558\uac8c \uc0ac\uc6a9\ud560 IP \uc8fc\uc18c\uc758 \ub300\uc5ed\ub9cc \uc124\uc815\ud558\uba74 \ub429\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","Layer 2 \ubaa8\ub4dc\ub97c \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \uc6cc\ucee4 \ub178\ub4dc\uc758 \ub124\ud2b8\uc6cc\ud06c \uc778\ud130\ud398\uc774\uc2a4\uc5d0 IP\ub97c \ubc14\uc778\ub529 \ud558\uc9c0 \uc54a\uc544\ub3c4 \ub418\ub294\ub370 \ub85c\uceec \ub124\ud2b8\uc6cc\ud06c\uc758 ARP \uc694\uccad\uc5d0 \uc9c1\uc811 \uc751\ub2f5\ud558\uc5ec \ucef4\ud4e8\ud130\uc758 MAC\uc8fc\uc18c\ub97c \ud074\ub77c\uc774\uc5b8\ud2b8\uc5d0 \uc81c\uacf5\ud558\ub294 \ubc29\uc2dd\uc73c\ub85c \uc791\ub3d9\ud558\uae30 
\ub54c\ubb38\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ub2e4\uc74c ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," \ud30c\uc77c\uc740 MetalLB \uac00 192.168.35.100 ~ 192.168.35.110\uc758 IP\uc5d0 \ub300\ud55c \uc81c\uc5b4 \uad8c\ud55c\uc744 \uc81c\uacf5\ud558\uace0 Layer 2 \ubaa8\ub4dc\ub97c \uad6c\uc131\ud558\ub294 \uc124\uc815\uc785\ub2c8\ub2e4."),(0,l.kt)("p",null,"\ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc\uc640 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uac00 \ubd84\ub9ac\ub41c \uacbd\uc6b0, 192.168.35.100 ~ 192.168.35.110 \ub300\uc5ed\uc774 \ud074\ub77c\uc774\uc5b8\ud2b8 \ub178\ub4dc\uc640 \ud074\ub7ec\uc2a4\ud130 \ub178\ub4dc \ubaa8\ub450 \uc811\uadfc \uac00\ub2a5\ud55c \ub300\uc5ed\uc774\uc5b4\uc57c \ud569\ub2c8\ub2e4."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"\uc704\uc758 \uc124\uc815\uc744 \uc801\uc6a9\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"\uc815\uc0c1\uc801\uc73c\ub85c \ubc30\ud3ec\ud558\uba74 \ub2e4\uc74c\uacfc \uac19\uc774 \ucd9c\ub825\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"metallb-\uc0ac\uc6a9"},"MetalLB \uc0ac\uc6a9"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 kubeflow\uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 istio-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 istio-ingressgateway \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer"),"\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n 
clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100")," \uc778 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:n(5556).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 minio \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 kubeflow \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 minio-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n 
kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:n(6589).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 mlflow \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 mlflow-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 mlflow-server-service \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 \ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n 
mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:n(5864).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"\uba3c\uc800 Grafana \uc758 Dashboard \ub97c \uc81c\uacf5\ud558\ub294 seldon-system \ub124\uc784\uc2a4\ud398\uc774\uc2a4\uc758 seldon-core-analytics-grafana \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc744 LoadBalancer\ub85c \ubcc0\uacbd\ud558\uc5ec MetalLB\ub85c\ubd80\ud130 \ub85c\ub4dc \ubca8\ub7f0\uc2f1 \uae30\ub2a5\uc744 \uc81c\uacf5\ubc1b\uae30 \uc804\uc5d0, \ud604\uc7ac \uc0c1\ud0dc\ub97c \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"\ud574\ub2f9 \uc11c\ube44\uc2a4\uc758 \ud0c0\uc785\uc740 ClusterIP\uc774\uba70, External-IP \uac12\uc740 ",(0,l.kt)("inlineCode",{parentName:"p"},"none")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"type \uc744 LoadBalancer \ub85c \ubcc0\uacbd\ud558\uace0 \uc6d0\ud558\ub294 IP \uc8fc\uc18c\ub97c \uc785\ub825\ud558\uace0 \uc2f6\uc740 \uacbd\uc6b0 loadBalancerIP \ud56d\ubaa9\uc744 \ucd94\uac00\ud569\ub2c8\ub2e4.",(0,l.kt)("br",{parentName:"p"}),"\n","\ucd94\uac00 \ud558\uc9c0 \uc54a\uc744 \uacbd\uc6b0\uc5d0\ub294 \uc704\uc5d0\uc11c \uc124\uc815\ud55c IP \uc8fc\uc18c\ud480\uc5d0\uc11c \uc21c\ucc28\uc801\uc73c\ub85c IP \uc8fc\uc18c\uac00 
\ubc30\uc815\ub429\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"\ub2e4\uc2dc \ud655\uc778\uc744 \ud574\ubcf4\uba74 External-IP \uac12\uc774 ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103")," \uc778 \uac83\uc744 \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Web Browser \ub97c \uc5f4\uc5b4 ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," \uc73c\ub85c \uc811\uc18d\ud558\uc5ec, \ub2e4\uc74c\uacfc \uac19\uc740 \ud654\uba74\uc774 \ucd9c\ub825\ub418\ub294 \uac83\uc744 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:n(2291).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},2291:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},5556:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},6589:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},5864:(e,a,n)=>{n.d(a,{Z:()=>t});const t=n.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.0f29a9d4.js b/assets/js/runtime~main.11b636d4.js similarity index 57% rename from assets/js/runtime~main.0f29a9d4.js rename to assets/js/runtime~main.11b636d4.js index 05bb9bd7..024727e4 100644 --- a/assets/js/runtime~main.0f29a9d4.js +++ b/assets/js/runtime~main.11b636d4.js @@ -1 +1 @@ -(()=>{"use strict";var e,d,a,b,f,c={},t={};function r(e){var d=t[e];if(void 0!==d)return d.exports;var a=t[e]={id:e,loaded:!1,exports:{}};return c[e].call(a.exports,a,a.exports,r),a.loaded=!0,a.exports}r.m=c,r.c=t,e=[],r.O=(d,a,b,f)=>{if(!a){var c=1/0;for(i=0;i=f)&&Object.keys(r.O).every((e=>r.O[e](a[o])))?a.splice(o--,1):(t=!1,f0&&e[i-1][2]>f;i--)e[i]=e[i-1];e[i]=[a,b,f]},r.n=e=>{var 
d=e&&e.__esModule?()=>e.default:()=>e;return r.d(d,{a:d}),d},a=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,r.t=function(e,b){if(1&b&&(e=this(e)),8&b)return e;if("object"==typeof e&&e){if(4&b&&e.__esModule)return e;if(16&b&&"function"==typeof e.then)return e}var f=Object.create(null);r.r(f);var c={};d=d||[null,a({}),a([]),a(a)];for(var t=2&b&&e;"object"==typeof t&&!~d.indexOf(t);t=a(t))Object.getOwnPropertyNames(t).forEach((d=>c[d]=()=>e[d]));return c.default=()=>e,r.d(f,c),f},r.d=(e,d)=>{for(var a in d)r.o(d,a)&&!r.o(e,a)&&Object.defineProperty(e,a,{enumerable:!0,get:d[a]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce(((d,a)=>(r.f[a](e,d),d)),[])),r.u=e=>"assets/js/"+({53:"4a09dd96",200:"2a07449d",204:"e46e340c",434:"13e7227b",560:"b91e83a7",728:"6246222d",797:"b93cd888",799:"e7600b97",822:"91e4f63c",860:"fc8ffda8",956:"b1ad0a9e",1008:"9b54c487",1017:"84c20269",1044:"8061fab4",1427:"facca37e",1526:"e8d17a59",1612:"29c7a46b",1725:"9a73b948",1800:"c07e8a62",1804:"c58e39e2",1948:"b0739f8c",2157:"9f898b75",2197:"935f2afb",2335:"d9cdcec8",2349:"6016bee0",2375:"5b3cd5ae",2416:"01a9496d",2429:"d0b69af8",2508:"c1242cde",2527:"bcbfd5bd",2603:"68f81397",2610:"5dc48d01",2657:"352e0155",2676:"7ef46b74",2725:"541347e5",2779:"cf706b7e",2803:"7bb5633a",2949:"089e5a41",2996:"ae95ad8d",3085:"1f391b9e",3225:"d5c893db",3237:"1df93b7f",3255:"a7958b24",3457:"affd256f",3641:"f28dfc8e",3684:"c430b8da",3774:"4a2e0471",3870:"2d9584e5",3872:"4fc796a7",4023:"44d64813",4119:"317f9d80",4177:"167d5ab1",4297:"5523074d",4299:"88b38b2b",4555:"a606c19a",4775:"6d8a40f9",4818:"b3824f13",5040:"e2bc44c1",5105:"6fb0e7c9",5127:"e070c0d8",5271:"e3a88667",5384:"0425fa84",5435:"3f2d0791",5597:"b0207dc0",5604:"8c6322ce",5642:"bb396da7",5658:"92059b0a",5717:"b1f93f8c",5787:"8b8d160d",5825:"c4afae5c",5865:"36614f1b",5867:"9c5e90dd",6012:"97a152fd",6052:"36fd762b",6358:"272bb263",6397:"165f2d18",6540:"c8feb4f8",6572:"1671dd43",6639:"c1115317",6643:"38642333",6661:"bf383222",6742:"4e3fc5d7",6746:"b2277d04",7345:"d9ed3de0",7346:"d3303310",7367:"1e99a105",7414:"393be207",7549:"2369f063",7616:"306a8c6c",7651:"48eb1972",7918:"17896441",8084:"f1d05694",8225:"d7dc9408",8231:"8999d56c",8309:"d7d2d94a",8425:"8dd8b1e3",8449:"6b57b422",8457:"6b7916cd",8657:"e287e96b",8677:"3546d36b",8693:"57b26f6a",8737:"4579a793",8834:"fb803674",8837:"39b2b572",8860:"d6a8d944",8932:"751a793c",9229:"fb6a8c4a",9325:"c37072e4",9398:"be2f486c",9514:"1be78505",9558:"2bbcffe4",9683:"838277dc",9700:"f84c40fb",9925:"15cf2139"}[e]||e)+"."+{53:"a626411c",200:"2be45199",204:"92f69db7",434:"64afb4b7",560:"53320bf2",728:"56e22807",797:"97e0c552",799:"26de1f81",822:"2b363c62",860:"d92bd484",956:"820522ff",1008:"b0cb9abb",1017:"4edb664d",1044:"61253720",1427:"39b99df2",1526:"7d4f0c66",1612:"7c918b5d",1725:"e2700338",1800:"232aef8c",1804:"9f5738d0",1948:"fa9d9f93",2157:"fbac3ee7",2197:"ecdd8cb0",2335:"ed342008",2349:"43321011",2375:"1cea74eb",2416:"d6f459d0",2429:"0e280640",2508:"58087205",2527:"4f89a53d",2603:"d55909e0",2610:"db2a5c35",2657:"6e598cc9",2676:"f1dbfdee",2725:"142ae047",2779:"8f1b64d7",2803:"27b2c850",2949:"71e49dd8",2996:"e2d19c21",3085:"50df1a02",3225:"4c4cecc1",3237:"c4fb40a2",3255:"56332bed",3457:"1933e19e",3641:"0caaaad0",3684:"4b2e9285",3774:"ea979e3b",3870:"dc51ccf7",3872:"55c8ebbe",4023:"26178d88",4119:"a0e6c39b",4177:"411bad63",4297:"2f9c6695",4299:"0eb7b3c5",4555:"dc487a08",4775:"9b685874",4818:"eb8c4c60",4972:"9218459b",5040:"d97e1202",5105:"531715dd",5127:"090c99cc",5271:"77ba9c0f",5384:"ff62a655",5435:"1dcf3050",5597:"2416ce8
4",5604:"cc644628",5642:"fad17c4c",5658:"f442418e",5717:"0d068ec2",5787:"f2d83f5a",5825:"db1f7d62",5865:"9e4f3599",5867:"bf776eaf",6012:"bc56892d",6052:"66852ff2",6358:"94fb9f09",6397:"ff4ce9f2",6540:"27abc486",6572:"ea405406",6639:"fc7a8656",6643:"6757cffa",6661:"8ae7415e",6742:"05108ede",6746:"08ea1dcd",7345:"369633af",7346:"0bd8ebb4",7367:"d6e8b39f",7414:"9652e478",7549:"92f4c018",7616:"7fc18856",7651:"0629ff22",7918:"d8f6f22a",8084:"ab6b7e87",8225:"46b9e217",8231:"daf73b12",8309:"26ef7de0",8425:"be1bde8a",8449:"056e4749",8457:"a6c9252d",8657:"1c29974a",8677:"9935fec7",8693:"a561200f",8737:"052cd2c0",8834:"1ede9e09",8837:"20978a65",8860:"4e2a1683",8932:"78750b3e",9229:"270cbf4d",9325:"06249857",9398:"a11ca60b",9455:"bfee0bcc",9514:"f046b65b",9558:"f78a9013",9683:"cec0c92e",9700:"27e7ffdf",9925:"fd4e630f"}[e]+".js",r.miniCssF=e=>{},r.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),r.o=(e,d)=>Object.prototype.hasOwnProperty.call(e,d),b={},f="v-2:",r.l=(e,d,a,c)=>{if(b[e])b[e].push(d);else{var t,o;if(void 0!==a)for(var n=document.getElementsByTagName("script"),i=0;i{t.onerror=t.onload=null,clearTimeout(s);var f=b[e];if(delete b[e],t.parentNode&&t.parentNode.removeChild(t),f&&f.forEach((e=>e(a))),d)return d(a)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:t}),12e4);t.onerror=l.bind(null,t.onerror),t.onload=l.bind(null,t.onload),o&&document.head.appendChild(t)}},r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.p="/",r.gca=function(e){return e={17896441:"7918",38642333:"6643","4a09dd96":"53","2a07449d":"200",e46e340c:"204","13e7227b":"434",b91e83a7:"560","6246222d":"728",b93cd888:"797",e7600b97:"799","91e4f63c":"822",fc8ffda8:"860",b1ad0a9e:"956","9b54c487":"1008","84c20269":"1017","8061fab4":"1044",facca37e:"1427",e8d17a59:"1526","29c7a46b":"1612","9a73b948":"1725",c07e8a62:"1800",c58e39e2:"1804",b0739f8c:"1948","9f898b75":"2157","935f2afb":"2197",d9cdcec8:"2335","6016bee0":"2349","5b3cd5ae":"2375","01a9496d":"2416",d0b69af8:"2429",c1242cde:"2508",bcbfd5bd:"2527","68f81397":"2603","5dc48d01":"2610","352e0155":"2657","7ef46b74":"2676","541347e5":"2725",cf706b7e:"2779","7bb5633a":"2803","089e5a41":"2949",ae95ad8d:"2996","1f391b9e":"3085",d5c893db:"3225","1df93b7f":"3237",a7958b24:"3255",affd256f:"3457",f28dfc8e:"3641",c430b8da:"3684","4a2e0471":"3774","2d9584e5":"3870","4fc796a7":"3872","44d64813":"4023","317f9d80":"4119","167d5ab1":"4177","5523074d":"4297","88b38b2b":"4299",a606c19a:"4555","6d8a40f9":"4775",b3824f13:"4818",e2bc44c1:"5040","6fb0e7c9":"5105",e070c0d8:"5127",e3a88667:"5271","0425fa84":"5384","3f2d0791":"5435",b0207dc0:"5597","8c6322ce":"5604",bb396da7:"5642","92059b0a":"5658",b1f93f8c:"5717","8b8d160d":"5787",c4afae5c:"5825","36614f1b":"5865","9c5e90dd":"5867","97a152fd":"6012","36fd762b":"6052","272bb263":"6358","165f2d18":"6397",c8feb4f8:"6540","1671dd43":"6572",c1115317:"6639",bf383222:"6661","4e3fc5d7":"6742",b2277d04:"6746",d9ed3de0:"7345",d3303310:"7346","1e99a105":"7367","393be207":"7414","2369f063":"7549","306a8c6c":"7616","48eb1972":"7651",f1d05694:"8084",d7dc9408:"8225","8999d56c":"8231",d7d2d94a:"8309","8dd8b1e3":"8425","6b57b422":"8449","6b7916cd":"8457",e287e96b:"8657","3546d36b":"8677","57b26f6a":"8693","4579a793":"8737",fb803674:"8834","39b2b572":"8837",d6a8d944:"8860","751a793c":"8932",fb6a8c4a:"9229",c37072e4:"9325",be2f48
6c:"9398","1be78505":"9514","2bbcffe4":"9558","838277dc":"9683",f84c40fb:"9700","15cf2139":"9925"}[e]||e,r.p+r.u(e)},(()=>{var e={1303:0,532:0};r.f.j=(d,a)=>{var b=r.o(e,d)?e[d]:void 0;if(0!==b)if(b)a.push(b[2]);else if(/^(1303|532)$/.test(d))e[d]=0;else{var f=new Promise(((a,f)=>b=e[d]=[a,f]));a.push(b[2]=f);var c=r.p+r.u(d),t=new Error;r.l(c,(a=>{if(r.o(e,d)&&(0!==(b=e[d])&&(e[d]=void 0),b)){var f=a&&("load"===a.type?"missing":a.type),c=a&&a.target&&a.target.src;t.message="Loading chunk "+d+" failed.\n("+f+": "+c+")",t.name="ChunkLoadError",t.type=f,t.request=c,b[1](t)}}),"chunk-"+d,d)}},r.O.j=d=>0===e[d];var d=(d,a)=>{var b,f,c=a[0],t=a[1],o=a[2],n=0;if(c.some((d=>0!==e[d]))){for(b in t)r.o(t,b)&&(r.m[b]=t[b]);if(o)var i=o(r)}for(d&&d(a);n{"use strict";var e,d,a,c,b,f={},t={};function r(e){var d=t[e];if(void 0!==d)return d.exports;var a=t[e]={id:e,loaded:!1,exports:{}};return f[e].call(a.exports,a,a.exports,r),a.loaded=!0,a.exports}r.m=f,r.c=t,e=[],r.O=(d,a,c,b)=>{if(!a){var f=1/0;for(i=0;i=b)&&Object.keys(r.O).every((e=>r.O[e](a[o])))?a.splice(o--,1):(t=!1,b0&&e[i-1][2]>b;i--)e[i]=e[i-1];e[i]=[a,c,b]},r.n=e=>{var d=e&&e.__esModule?()=>e.default:()=>e;return r.d(d,{a:d}),d},a=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,r.t=function(e,c){if(1&c&&(e=this(e)),8&c)return e;if("object"==typeof e&&e){if(4&c&&e.__esModule)return e;if(16&c&&"function"==typeof e.then)return e}var b=Object.create(null);r.r(b);var f={};d=d||[null,a({}),a([]),a(a)];for(var t=2&c&&e;"object"==typeof t&&!~d.indexOf(t);t=a(t))Object.getOwnPropertyNames(t).forEach((d=>f[d]=()=>e[d]));return f.default=()=>e,r.d(b,f),b},r.d=(e,d)=>{for(var a in d)r.o(d,a)&&!r.o(e,a)&&Object.defineProperty(e,a,{enumerable:!0,get:d[a]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce(((d,a)=>(r.f[a](e,d),d)),[])),r.u=e=>"assets/js/"+({53:"4a09dd96",200:"2a07449d",204:"e46e340c",434:"13e7227b",560:"b91e83a7",728:"6246222d",797:"b93cd888",799:"e7600b97",822:"91e4f63c",860:"fc8ffda8",956:"b1ad0a9e",1008:"9b54c487",1017:"84c20269",1044:"8061fab4",1427:"facca37e",1526:"e8d17a59",1612:"29c7a46b",1725:"9a73b948",1800:"c07e8a62",1804:"c58e39e2",1948:"b0739f8c",2157:"9f898b75",2197:"935f2afb",2335:"d9cdcec8",2349:"6016bee0",2375:"5b3cd5ae",2416:"01a9496d",2429:"d0b69af8",2508:"c1242cde",2527:"bcbfd5bd",2603:"68f81397",2610:"5dc48d01",2657:"352e0155",2676:"7ef46b74",2725:"541347e5",2779:"cf706b7e",2803:"7bb5633a",2949:"089e5a41",2996:"ae95ad8d",3085:"1f391b9e",3225:"d5c893db",3237:"1df93b7f",3255:"a7958b24",3457:"affd256f",3641:"f28dfc8e",3684:"c430b8da",3774:"4a2e0471",3870:"2d9584e5",3872:"4fc796a7",4023:"44d64813",4119:"317f9d80",4177:"167d5ab1",4297:"5523074d",4299:"88b38b2b",4555:"a606c19a",4775:"6d8a40f9",4818:"b3824f13",5040:"e2bc44c1",5105:"6fb0e7c9",5127:"e070c0d8",5271:"e3a88667",5384:"0425fa84",5435:"3f2d0791",5597:"b0207dc0",5604:"8c6322ce",5642:"bb396da7",5658:"92059b0a",5717:"b1f93f8c",5787:"8b8d160d",5825:"c4afae5c",5865:"36614f1b",5867:"9c5e90dd",6012:"97a152fd",6052:"36fd762b",6358:"272bb263",6397:"165f2d18",6540:"c8feb4f8",6572:"1671dd43",6639:"c1115317",6643:"38642333",6661:"bf383222",6742:"4e3fc5d7",6746:"b2277d04",7345:"d9ed3de0",7346:"d3303310",7367:"1e99a105",7414:"393be207",7549:"2369f063",7616:"306a8c6c",7651:"48eb1972",7918:"17896441",8084:"f1d05694",8225:"d7dc9408",8231:"8999d56c",8309:"d7d2d94a",8425:"8dd8b1e3",8449:"6b57b422",8457:"6b7916cd",8657:"e287e96b",8677:"3546d36b",8693:"57b26f6a",8737:"4579a793",8834:"fb803674",8837:"39b2b572",8860:"d6a8d944",8932:"751a793c",9229:"fb6a8c4a",9325:"c37072e4"
,9398:"be2f486c",9514:"1be78505",9558:"2bbcffe4",9683:"838277dc",9700:"f84c40fb",9925:"15cf2139"}[e]||e)+"."+{53:"49f2f64e",200:"737e4f29",204:"93dc62fc",434:"e14a8c54",560:"ff8dee9e",728:"f026b1ff",797:"5e05879c",799:"414024f5",822:"78febf18",860:"8a53f4cb",956:"6ff93fcb",1008:"58adc8fb",1017:"daeb0eed",1044:"45faf595",1427:"a354bc59",1526:"dfd0edd5",1612:"1fcec701",1725:"9914337f",1800:"b219d7bb",1804:"485ccbff",1948:"bae8b43d",2157:"15ac09e3",2197:"ecdd8cb0",2335:"43d53649",2349:"0e9c8c76",2375:"08e33229",2416:"7d5a9ac4",2429:"2f8d078b",2508:"540cfd76",2527:"4f89a53d",2603:"c0222f1d",2610:"a92c1cb6",2657:"6e598cc9",2676:"6dbfdf9e",2725:"455fe2c3",2779:"bc31be8d",2803:"fd9e2598",2949:"3bb68be8",2996:"e2d19c21",3085:"50df1a02",3225:"f9db721e",3237:"21501263",3255:"7400f567",3457:"8d1b9b6b",3641:"3b4d20c5",3684:"9598db3e",3774:"818f4e78",3870:"88941c05",3872:"8d31fde2",4023:"89e8eda4",4119:"55c20a28",4177:"7b4f8e38",4297:"14a1622e",4299:"4207e29d",4555:"77fee20b",4775:"e46891e9",4818:"26419845",4972:"9218459b",5040:"fb60d7dd",5105:"e4a94a4f",5127:"1a134767",5271:"a6014937",5384:"fa9e4732",5435:"f960958f",5597:"2ce11a4d",5604:"a17642cf",5642:"357976ee",5658:"e3c8943c",5717:"2e1de0ec",5787:"502b91fd",5825:"34e05a5a",5865:"2c78bdaf",5867:"c76c71f9",6012:"ffaf15c3",6052:"8d9d7a1c",6358:"853807d3",6397:"abd35334",6540:"69143075",6572:"89032e00",6639:"64871be4",6643:"107370bc",6661:"e7c7553b",6742:"a6d29434",6746:"4be6630b",7345:"a4f05ca4",7346:"65ad5c36",7367:"7a34c251",7414:"9652e478",7549:"92f4c018",7616:"7fc18856",7651:"e4c80eb1",7918:"d8f6f22a",8084:"bf154a45",8225:"ad348719",8231:"39b342ab",8309:"0b4e4bf4",8425:"02ae5002",8449:"d50d1f5b",8457:"e3e4eaac",8657:"92c61555",8677:"a10369c4",8693:"f4f5173c",8737:"91381a61",8834:"9a691c48",8837:"5117de81",8860:"69139e25",8932:"f474394d",9229:"54e59c3f",9325:"88bc509b",9398:"1f45d851",9455:"bfee0bcc",9514:"f046b65b",9558:"1f0fc702",9683:"544ec3e2",9700:"ab148738",9925:"18ea7c4d"}[e]+".js",r.miniCssF=e=>{},r.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),r.o=(e,d)=>Object.prototype.hasOwnProperty.call(e,d),c={},b="v-2:",r.l=(e,d,a,f)=>{if(c[e])c[e].push(d);else{var t,o;if(void 0!==a)for(var n=document.getElementsByTagName("script"),i=0;i{t.onerror=t.onload=null,clearTimeout(s);var b=c[e];if(delete c[e],t.parentNode&&t.parentNode.removeChild(t),b&&b.forEach((e=>e(a))),d)return d(a)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:t}),12e4);t.onerror=l.bind(null,t.onerror),t.onload=l.bind(null,t.onload),o&&document.head.appendChild(t)}},r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.p="/",r.gca=function(e){return 
e={17896441:"7918",38642333:"6643","4a09dd96":"53","2a07449d":"200",e46e340c:"204","13e7227b":"434",b91e83a7:"560","6246222d":"728",b93cd888:"797",e7600b97:"799","91e4f63c":"822",fc8ffda8:"860",b1ad0a9e:"956","9b54c487":"1008","84c20269":"1017","8061fab4":"1044",facca37e:"1427",e8d17a59:"1526","29c7a46b":"1612","9a73b948":"1725",c07e8a62:"1800",c58e39e2:"1804",b0739f8c:"1948","9f898b75":"2157","935f2afb":"2197",d9cdcec8:"2335","6016bee0":"2349","5b3cd5ae":"2375","01a9496d":"2416",d0b69af8:"2429",c1242cde:"2508",bcbfd5bd:"2527","68f81397":"2603","5dc48d01":"2610","352e0155":"2657","7ef46b74":"2676","541347e5":"2725",cf706b7e:"2779","7bb5633a":"2803","089e5a41":"2949",ae95ad8d:"2996","1f391b9e":"3085",d5c893db:"3225","1df93b7f":"3237",a7958b24:"3255",affd256f:"3457",f28dfc8e:"3641",c430b8da:"3684","4a2e0471":"3774","2d9584e5":"3870","4fc796a7":"3872","44d64813":"4023","317f9d80":"4119","167d5ab1":"4177","5523074d":"4297","88b38b2b":"4299",a606c19a:"4555","6d8a40f9":"4775",b3824f13:"4818",e2bc44c1:"5040","6fb0e7c9":"5105",e070c0d8:"5127",e3a88667:"5271","0425fa84":"5384","3f2d0791":"5435",b0207dc0:"5597","8c6322ce":"5604",bb396da7:"5642","92059b0a":"5658",b1f93f8c:"5717","8b8d160d":"5787",c4afae5c:"5825","36614f1b":"5865","9c5e90dd":"5867","97a152fd":"6012","36fd762b":"6052","272bb263":"6358","165f2d18":"6397",c8feb4f8:"6540","1671dd43":"6572",c1115317:"6639",bf383222:"6661","4e3fc5d7":"6742",b2277d04:"6746",d9ed3de0:"7345",d3303310:"7346","1e99a105":"7367","393be207":"7414","2369f063":"7549","306a8c6c":"7616","48eb1972":"7651",f1d05694:"8084",d7dc9408:"8225","8999d56c":"8231",d7d2d94a:"8309","8dd8b1e3":"8425","6b57b422":"8449","6b7916cd":"8457",e287e96b:"8657","3546d36b":"8677","57b26f6a":"8693","4579a793":"8737",fb803674:"8834","39b2b572":"8837",d6a8d944:"8860","751a793c":"8932",fb6a8c4a:"9229",c37072e4:"9325",be2f486c:"9398","1be78505":"9514","2bbcffe4":"9558","838277dc":"9683",f84c40fb:"9700","15cf2139":"9925"}[e]||e,r.p+r.u(e)},(()=>{var e={1303:0,532:0};r.f.j=(d,a)=>{var c=r.o(e,d)?e[d]:void 0;if(0!==c)if(c)a.push(c[2]);else if(/^(1303|532)$/.test(d))e[d]=0;else{var b=new Promise(((a,b)=>c=e[d]=[a,b]));a.push(c[2]=b);var f=r.p+r.u(d),t=new Error;r.l(f,(a=>{if(r.o(e,d)&&(0!==(c=e[d])&&(e[d]=void 0),c)){var b=a&&("load"===a.type?"missing":a.type),f=a&&a.target&&a.target.src;t.message="Loading chunk "+d+" failed.\n("+b+": "+f+")",t.name="ChunkLoadError",t.type=b,t.request=f,c[1](t)}}),"chunk-"+d,d)}},r.O.j=d=>0===e[d];var d=(d,a)=>{var c,b,f=a[0],t=a[1],o=a[2],n=0;if(f.some((d=>0!==e[d]))){for(c in t)r.o(t,c)&&(r.m[c]=t[c]);if(o)var i=o(r)}for(d&&d(a);n - +
-

Community

모두의 MLOps 릴리즈 소식

새로운 포스트나 수정사항은 Announcements에서 확인할 수 있습니다.

Question

프로젝트 내용과 관련해 궁금한 점은 Q&A를 통해 질문할 수 있습니다.

Suggestion

제안점은 Ideas를 통해 제안해 주시면 됩니다.

- +

Community

모두의 MLOps 릴리즈 소식

새로운 포스트나 수정사항은 Announcements에서 확인할 수 있습니다.

Question

프로젝트 내용과 관련해 궁금한 점은 Q&A를 통해 질문할 수 있습니다.

Suggestion

제안점은 Ideas를 통해 제안해 주시면 됩니다.

+ \ No newline at end of file diff --git a/community/contributors/index.html b/community/contributors/index.html index e24756ae..970fce81 100644 --- a/community/contributors/index.html +++ b/community/contributors/index.html @@ -7,13 +7,13 @@ - +
-

Contributors

Main Authors

Jongseob Jeon's avatar

Jongseob Jeon

Project Leader
마키나락스에서 머신러닝 엔지니어로 일하고 있습니다. 모두의 딥러닝을 통해 많은 사람들이 딥러닝을 쉽게 접했듯이 모두의 MLOps를 통해 많은 사람들이 MLOps에 쉽게 접할 수 있길 바랍니다.
Jayeon Kim's avatar

Jayeon Kim

Project Member
비효율적인 작업을 자동화하는 것에 관심이 많습니다.
Youngchel Jang's avatar

Youngchel Jang

Project Member
마키나락스에서 MLOps Engineer로 일하고 있습니다. 단순하게 생각하는 노력을 하고 있습니다.

Contributors

Thank you for contributing our tutorials!

Jongsun Shinn's avatar

Jongsun Shinn

마키나락스에서 ML Engineer로 일하고 있습니다.
Sangwoo Shim's avatar

Sangwoo Shim

마키나락스에서 CTO로 일하고 있습니다. 마키나락스는 머신러닝 기반의 산업용 AI 솔루션을 개발하는 스타트업입니다. 산업 현장의 문제 해결을 통해 사람이 본연의 일에 집중할 수 있게 만드는 것, 그것이 우리가 하는 일입니다.
Seunghyun Ko's avatar

Seunghyun Ko

3i에서 MLOps Engineer로 일하고 있습니다. kubeflow에 관심이 많습니다.
SeungTae Kim's avatar

SeungTae Kim

Genesis Lab이라는 스타트업에서 Applied AI Engineer 인턴 업무를 수행하고 있습니다. 머신러닝 생태계가 우리 산업 전반에 큰 변화를 가져올 것이라 믿으며, 한 걸음씩 나아가고 있습니다.
Youngdon Tae's avatar

Youngdon Tae

백패커에서 ML 엔지니어로 일하고 있습니다. 자연어처리, 추천시스템, MLOps에 관심이 많습니다.
- +

Contributors

Main Authors

Jongseob Jeon's avatar

Jongseob Jeon

Project Leader
마키나락스에서 머신러닝 엔지니어로 일하고 있습니다. 모두의 딥러닝을 통해 많은 사람들이 딥러닝을 쉽게 접했듯이 모두의 MLOps를 통해 많은 사람들이 MLOps에 쉽게 접할 수 있길 바랍니다.
Jayeon Kim's avatar

Jayeon Kim

Project Member
비효율적인 작업을 자동화하는 것에 관심이 많습니다.
Youngchel Jang's avatar

Youngchel Jang

Project Member
마키나락스에서 MLOps Engineer로 일하고 있습니다. 단순하게 생각하는 노력을 하고 있습니다.

Contributors

Thank you for contributing our tutorials!

Jongsun Shinn's avatar

Jongsun Shinn

마키나락스에서 ML Engineer로 일하고 있습니다.
Sangwoo Shim's avatar

Sangwoo Shim

마키나락스에서 CTO로 일하고 있습니다. 마키나락스는 머신러닝 기반의 산업용 AI 솔루션을 개발하는 스타트업입니다. 산업 현장의 문제 해결을 통해 사람이 본연의 일에 집중할 수 있게 만드는 것, 그것이 우리가 하는 일입니다.
Seunghyun Ko's avatar

Seunghyun Ko

3i에서 MLOps Engineer로 일하고 있습니다. kubeflow에 관심이 많습니다.
SeungTae Kim's avatar

SeungTae Kim

Genesis Lab이라는 스타트업에서 Applied AI Engineer 인턴 업무를 수행하고 있습니다. 머신러닝 생태계가 우리 산업 전반에 큰 변화를 가져올 것이라 믿으며, 한 걸음씩 나아가고 있습니다.
Youngdon Tae's avatar

Youngdon Tae

백패커에서 ML 엔지니어로 일하고 있습니다. 자연어처리, 추천시스템, MLOps에 관심이 많습니다.
+ \ No newline at end of file diff --git a/community/how-to-contribute/index.html b/community/how-to-contribute/index.html index e05f345c..541ff180 100644 --- a/community/how-to-contribute/index.html +++ b/community/how-to-contribute/index.html @@ -7,15 +7,15 @@ - +

How to Contribute

How to Start

Git Repo 준비

  1. 모두의 MLOps GitHub Repository에 접속합니다.

  2. 여러분의 개인 Repository로 Fork합니다.

  3. Forked Repository를 여러분의 작업 환경으로 git clone합니다.

환경 설정

  1. 모두의 MLOps는 Hugo 와 Node를 이용하고 있습니다.
    다음 명령어를 통해 필요한 패키지가 설치되어 있는지 확인합니다.
  • node & npm

    npm --version
  • hugo

    hugo version
  1. 필요한 node module을 설치합니다.

    npm install
  2. 프로젝트에서는 각 글의 일관성을 위해서 여러 markdown lint를 적용하고 있습니다.
    내용을 수정하거나 추가한 후, 다음 명령어를 실행해 lint 검사를 통과하는지 확인하고 커밋합니다.

    npm test
  3. lint 확인 완료 후 npm ci 를 실행해 의존성 패키지를 클린 설치합니다.

    npm ci
  4. 로컬에서 실행 후 수정한 글이 정상적으로 나오는지 확인합니다.

    npm run start

How to Contribute

1. 새로운 포스트를 작성할 때

새로운 포스트는 각 챕터와 포스트의 위치에 맞는 weight를 설정합니다.

  • Introduction: 1xx
  • Setup: 2xx
  • Kubeflow: 3xx
  • API Deployment: 4xx
  • Help: 10xx
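
예를 들어 Kubeflow 챕터에 새 포스트를 추가한다면 front matter의 weight를 3xx 범위에서 지정합니다. 아래는 가상의 제목을 사용한 예시입니다.

---
title: "New Kubeflow Post"
weight: 301
contributors: ["John Doe"]
---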

2. 기존의 포스트를 수정할 때

기존의 포스트를 수정할 때 Contributor에 본인의 이름을 입력합니다.

contributors: ["John Doe", "Adam Smith"]

3. 프로젝트에 처음 기여할 때

프로젝트에 처음 기여하는 경우, content/kor/contributors에 본인의 이름으로 폴더를 생성한 후 _index.md라는 파일을 작성합니다.

예를 들어, minsoo kim이 본인의 영어 이름이라면, 폴더명은 minsoo-kim으로 하여 해당 폴더 내부의 _index.md파일에 다음의 내용을 작성합니다. -폴더명은 하이픈(-)으로 연결한 소문자로, title은 띄어쓰기를 포함한 CamelCase로 작성합니다.

---
title: "John Doe"
draft: false
---

After Pull Request

Pull Request를 생성하면 프로젝트에서는 자동으로 모두의 MLOps 운영진에게 리뷰 요청이 전해집니다. 최대 일주일 이내로 확인 후 Comment를 드릴 예정입니다.

- +폴더명은 하이픈(-)으로 연결한 소문자로, title은 띄어쓰기를 포함한 CamelCase로 작성합니다.

---
title: "John Doe"
draft: false
---

After Pull Request

Pull Request를 생성하면 프로젝트에서는 자동으로 모두의 MLOps 운영진에게 리뷰 요청이 전해집니다. 최대 일주일 이내로 확인 후 Comment를 드릴 예정입니다.

+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/seldon-children/index.html b/docs/1.0/api-deployment/seldon-children/index.html index 3d312197..48da4c8e 100644 --- a/docs/1.0/api-deployment/seldon-children/index.html +++ b/docs/1.0/api-deployment/seldon-children/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
버전: 1.0

6. Multi Models

Multi Models

앞서 설명했던 방법들은 모두 단일 모델을 대상으로 했습니다.
이번 페이지에서는 여러 개의 모델을 연결하는 방법에 대해서 알아봅니다.

Pipeline

우선 모델을 2개를 생성하는 파이프라인을 작성하겠습니다.

모델은 앞서 사용한 SVC 모델에 StandardScaler를 추가하고 저장하도록 하겠습니다.

from functools import partial

import kfp
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
create_component_from_func,
packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
data_path: OutputPath("csv"),
target_path: OutputPath("csv"),
):
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris()

data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
target = pd.DataFrame(iris["target"], columns=["target"])

data.to_csv(data_path, index=False)
target.to_csv(target_path, index=False)

@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
)
def train_scaler_from_csv(
data_path: InputPath("csv"),
scaled_data_path: OutputPath("csv"),
model_path: OutputPath("dill"),
input_example_path: OutputPath("dill"),
signature_path: OutputPath("dill"),
conda_env_path: OutputPath("dill"),
):
import dill
import pandas as pd
from sklearn.preprocessing import StandardScaler

from mlflow.models.signature import infer_signature
from mlflow.utils.environment import _mlflow_conda_env

data = pd.read_csv(data_path)

scaler = StandardScaler()
scaled_data = scaler.fit_transform(data)
scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)

scaled_data.to_csv(scaled_data_path, index=False)

with open(model_path, mode="wb") as file_writer:
dill.dump(scaler, file_writer)

input_example = data.sample(1)
with open(input_example_path, "wb") as file_writer:
dill.dump(input_example, file_writer)

signature = infer_signature(data, scaler.transform(data))
with open(signature_path, "wb") as file_writer:
dill.dump(signature, file_writer)

conda_env = _mlflow_conda_env(
additional_pip_deps=["scikit-learn"],
install_mlflow=False
)
with open(conda_env_path, "wb") as file_writer:
dill.dump(conda_env, file_writer)


@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
)
def train_svc_from_csv(
train_data_path: InputPath("csv"),
train_target_path: InputPath("csv"),
model_path: OutputPath("dill"),
input_example_path: OutputPath("dill"),
signature_path: OutputPath("dill"),
conda_env_path: OutputPath("dill"),
kernel: str,
):
import dill
import pandas as pd
from sklearn.svm import SVC

from mlflow.models.signature import infer_signature
from mlflow.utils.environment import _mlflow_conda_env

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

input_example = train_data.sample(1)
with open(input_example_path, "wb") as file_writer:
dill.dump(input_example, file_writer)

signature = infer_signature(train_data, clf.predict(train_data))
with open(signature_path, "wb") as file_writer:
dill.dump(signature, file_writer)

conda_env = _mlflow_conda_env(
additional_pip_deps=["scikit-learn"],
install_mlflow=False
)
with open(conda_env_path, "wb") as file_writer:
dill.dump(conda_env, file_writer)


@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
model_name: str,
model_path: InputPath("dill"),
input_example_path: InputPath("dill"),
signature_path: InputPath("dill"),
conda_env_path: InputPath("dill"),
):
import os
import dill
from mlflow.sklearn import save_model

from mlflow.tracking.client import MlflowClient

os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

with open(model_path, mode="rb") as file_reader:
clf = dill.load(file_reader)

with open(input_example_path, "rb") as file_reader:
input_example = dill.load(file_reader)

with open(signature_path, "rb") as file_reader:
signature = dill.load(file_reader)

with open(conda_env_path, "rb") as file_reader:
conda_env = dill.load(file_reader)
save_model(
sk_model=clf,
path=model_name,
serialization_format="cloudpickle",
conda_env=conda_env,
signature=signature,
input_example=input_example,
)
run = client.create_run(experiment_id="0")
client.log_artifact(run.info.run_id, model_name)


from kfp.dsl import pipeline


@pipeline(name="multi_model_pipeline")
def multi_model_pipeline(kernel: str = "rbf"):
iris_data = load_iris_data()
scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])
_ = upload_sklearn_model_to_mlflow(
model_name="scaler",
model=scaled_data.outputs["model"],
input_example=scaled_data.outputs["input_example"],
signature=scaled_data.outputs["signature"],
conda_env=scaled_data.outputs["conda_env"],
)
model = train_svc_from_csv(
train_data=scaled_data.outputs["scaled_data"],
train_target=iris_data.outputs["target"],
kernel=kernel,
)
_ = upload_sklearn_model_to_mlflow(
model_name="svc",
model=model.outputs["model"],
input_example=model.outputs["input_example"],
signature=model.outputs["signature"],
conda_env=model.outputs["conda_env"],
)


if __name__ == "__main__":
kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")
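
컴파일된 multi_model_pipeline.yaml은 Kubeflow 대시보드에서 직접 업로드해도 되고, 아래처럼 kfp SDK의 Client로 업로드할 수도 있습니다. 아래 코드는 예시 스케치로, host 주소는 포트 포워딩을 가정한 값이며 멀티 유저 Kubeflow에서는 별도의 인증 설정이 필요할 수 있습니다.

import kfp

# Example only: assumes port-forwarded access to the pipeline UI.
# Adjust host/auth to your environment (multi-user Kubeflow needs credentials).
client = kfp.Client(host="http://localhost:8080/pipeline")

# Upload the compiled package so it appears in the Kubeflow Pipelines UI.
client.upload_pipeline(
    pipeline_package_path="multi_model_pipeline.yaml",
    pipeline_name="multi_model_pipeline",
)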

파이프라인을 업로드하면 다음과 같이 나옵니다.

children-kubeflow.png

MLflow 대시보드를 확인하면 다음과 같이 두 개의 모델이 생성됩니다.

children-mlflow.png

각각의 run_id를 확인 후 다음과 같이 SeldonDeployment 스펙을 정의합니다.

apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: multi-model-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: scaler-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret
- name: svc-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: scaler
image: seldonio/mlflowserver:1.8.0-dev
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0
- name: svc
image: seldonio/mlflowserver:1.8.0-dev
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: scaler
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: predict_method
type: STRING
value: "transform"
children:
- name: svc
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"

모델이 두 개가 되었으므로 각 모델의 initContainer와 container를 정의해주어야 합니다. 이 필드는 입력값을 array로 받으며 순서는 관계없습니다.

모델이 실행하는 순서는 graph에서 정의됩니다.

graph:
name: scaler
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: predict_method
type: STRING
value: "transform"
children:
- name: svc
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"

graph의 동작 방식은 처음 받은 값을 정해진 predict_method로 변환한 뒤 children으로 정의된 모델에 전달하는 방식입니다. -이 경우 scaler -> svc 로 데이터가 전달됩니다.

이제 위의 스펙을 yaml파일로 생성해 보겠습니다.

cat <<EOF > multi-model.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: multi-model-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: scaler-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret
- name: svc-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: scaler
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0
- name: svc
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: scaler
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: predict_method
type: STRING
value: "transform"
children:
- name: svc
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
EOF

다음 명령어를 통해 API를 생성합니다.

kubectl apply -f multi-model.yaml

정상적으로 수행되면 다음과 같이 출력됩니다.

seldondeployment.machinelearning.seldon.io/multi-model-example created

정상적으로 생성됐는지 확인합니다.

kubectl get po -n kubeflow-user-example-com | grep multi-model-example

정상적으로 생성되면 다음과 비슷한 pod이 생성됩니다.

multi-model-example-model-0-scaler-svc-9955fb795-n9ffw   4/4     Running     0          2m30s
- +이 경우 scaler -> svc 로 데이터가 전달됩니다.

이제 위의 스펙을 yaml파일로 생성해 보겠습니다.

cat <<EOF > multi-model.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: multi-model-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: scaler-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret
- name: svc-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: scaler
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0
- name: svc
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: scaler
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: predict_method
type: STRING
value: "transform"
children:
- name: svc
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
EOF

다음 명령어를 통해 API를 생성합니다.

kubectl apply -f multi-model.yaml

정상적으로 수행되면 다음과 같이 출력됩니다.

seldondeployment.machinelearning.seldon.io/multi-model-example created

정상적으로 생성됐는지 확인합니다.

kubectl get po -n kubeflow-user-example-com | grep multi-model-example

정상적으로 생성되면 다음과 비슷한 pod이 생성됩니다.

multi-model-example-model-0-scaler-svc-9955fb795-n9ffw   4/4     Running     0          2m30s
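
생성된 다중 모델 API 역시 단일 모델과 같은 규칙의 endpoint로 요청할 수 있습니다. 아래는 예시 요청으로, NODE_IP와 NODE_PORT 환경 변수는 앞의 Seldon API 페이지에서 설정한 값을 그대로 사용한다고 가정하며, 입력 데이터는 scaler를 거쳐 svc 모델로 전달됩니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/multi-model-example/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{ "data": { "ndarray": [[5.1, 3.5, 1.4, 0.2]] } }'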
+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/seldon-fields/index.html b/docs/1.0/api-deployment/seldon-fields/index.html index c377dfe4..0bb84b2a 100644 --- a/docs/1.0/api-deployment/seldon-fields/index.html +++ b/docs/1.0/api-deployment/seldon-fields/index.html @@ -7,7 +7,7 @@ - + @@ -28,8 +28,8 @@ 이미지에는 모델이 로드될 때 필요한 패키지들이 모두 설치되어 있어야 합니다.

Seldon Core에서 지원하는 공식 이미지는 다음과 같습니다.

  • seldonio/sklearnserver
  • seldonio/mlflowserver
  • seldonio/xgboostserver
  • seldonio/tfserving

volumeMounts

volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true

initContainer에서 다운로드받은 데이터가 있는 경로를 알려주는 필드입니다.
이때 모델이 수정되는 것을 방지하기 위해 readOnly: true도 같이 주겠습니다.

securityContext

securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

필요한 패키지를 설치할 때 pod이 권한이 없어서 패키지 설치를 수행하지 못할 수 있습니다.
이를 위해서 root 권한을 부여합니다. (다만 이 작업은 실제 서빙 시 보안 문제가 생길 수 있습니다.)

graph

graph:
name: model
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
children: []

모델이 동작하는 순서를 정의한 필드입니다.

name

모델 그래프의 이름입니다. container에서 정의된 이름을 사용합니다.

type

type은 크게 4가지가 있습니다.

  1. TRANSFORMER
  2. MODEL
  3. OUTPUT_TRANSFORMER
  4. ROUTER

각 type에 대한 자세한 설명은 Seldon Core Complex Graphs Metadata Example을 참조 바랍니다.

parameters

class init 에서 사용되는 값들입니다.
-sklearnserver에서 필요한 값은 다음 파일에서 확인할 수 있습니다.

class SKLearnServer(SeldonComponent):
def __init__(self, model_uri: str = None, method: str = "predict_proba"):

코드를 보면 model_urimethod를 정의할 수 있습니다.

children

순서도를 작성할 때 사용됩니다. 자세한 내용은 다음 페이지에서 설명합니다.

- +sklearnserver에서 필요한 값은 다음 파일에서 확인할 수 있습니다.

class SKLearnServer(SeldonComponent):
def __init__(self, model_uri: str = None, method: str = "predict_proba"):

코드를 보면 model_urimethod를 정의할 수 있습니다.

children

순서도를 작성할 때 사용됩니다. 자세한 내용은 다음 페이지에서 설명합니다.

+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/seldon-iris/index.html b/docs/1.0/api-deployment/seldon-iris/index.html index 3103e2f7..b6941ad3 100644 --- a/docs/1.0/api-deployment/seldon-iris/index.html +++ b/docs/1.0/api-deployment/seldon-iris/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ 배포된 API는 다음과 같은 규칙으로 생성됩니다. http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/

NODE_IP / NODE_PORT

Seldon Core 설치 시, Ambassador를 Ingress Controller로 설정하였으므로, SeldonDeployment로 생성된 API 서버는 모두 Ambassador의 Ingress gateway를 통해 요청할 수 있습니다.

따라서 우선 Ambassador Ingress Gateway의 url을 환경 변수로 설정합니다.

export NODE_IP=$(kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }')
export NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")

설정된 url을 확인합니다.

echo "NODE_IP"=$NODE_IP
echo "NODE_PORT"=$NODE_PORT

다음과 비슷하게 출력되어야 하며, 클라우드 등을 통해 설정할 경우, internal ip 주소가 설정되는 것을 확인할 수 있습니다.

NODE_IP=192.168.0.19
NODE_PORT=30486

namespace / seldon-deployment-name

SeldonDeployment가 배포된 namespaceseldon-deployment-name를 의미합니다. 이는 스펙을 정의할 때 metadata에 정의된 값을 사용합니다.

metadata:
name: sklearn
namespace: seldon-deploy

위의 예시에서는 namespace는 seldon-deploy, seldon-deployment-name은 sklearn 입니다.

method-name

SeldonDeployment에서 주로 사용하는 method-name은 두 가지가 있습니다.

  1. doc
  2. predictions

각각의 method의 자세한 사용 방법은 아래에서 설명합니다.

Using Swagger

우선 doc method를 사용하는 방법입니다. doc method를 이용하면 seldon에서 생성한 swagger에 접속할 수 있습니다.

1. Swagger 접속

위에서 설명한 ingress url 규칙에 따라 아래 주소를 통해 swagger에 접근할 수 있습니다.
-http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/

iris-swagger1.png

2. Swagger Predictions 메뉴 선택

UI에서 /seldon/seldon-deploy/sklearn/api/v1.0/predictions 메뉴를 선택합니다.

iris-swagger2.png

3. Try it out 선택

iris-swagger3.png

4. Request body에 data 입력

iris-swagger4.png

다음 데이터를 입력합니다.

{
"data": {
"ndarray":[[1.0, 2.0, 5.0, 6.0]]
}
}

5. 추론 결과 확인

Execute 버튼을 눌러서 추론 결과를 확인할 수 있습니다.

iris-swagger5.png

정상적으로 수행되면 다음과 같은 추론 결과를 얻습니다.

{
"data": {
"names": [
"t:0",
"t:1",
"t:2"
],
"ndarray": [
[
9.912315378486697e-7,
0.0007015931307746079,
0.9992974156376876
]
]
},
"meta": {
"requestPath": {
"classifier": "seldonio/sklearnserver:1.11.2"
}
}
}

Using CLI

또한, curl과 같은 http client CLI 도구를 활용해서도 API 요청을 수행할 수 있습니다.

예를 들어, 다음과 같이 /predictions를 요청하면

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{ "data": { "ndarray": [[1,2,3,4]] } }'

아래와 같은 응답이 정상적으로 출력되는 것을 확인할 수 있습니다.

{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}
- +http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/

iris-swagger1.png

2. Swagger Predictions 메뉴 선택

UI에서 /seldon/seldon-deploy/sklearn/api/v1.0/predictions 메뉴를 선택합니다.

iris-swagger2.png

3. Try it out 선택

iris-swagger3.png

4. Request body에 data 입력

iris-swagger4.png

다음 데이터를 입력합니다.

{
"data": {
"ndarray":[[1.0, 2.0, 5.0, 6.0]]
}
}

5. 추론 결과 확인

Execute 버튼을 눌러서 추론 결과를 확인할 수 있습니다.

iris-swagger5.png

정상적으로 수행되면 다음과 같은 추론 결과를 얻습니다.

{
"data": {
"names": [
"t:0",
"t:1",
"t:2"
],
"ndarray": [
[
9.912315378486697e-7,
0.0007015931307746079,
0.9992974156376876
]
]
},
"meta": {
"requestPath": {
"classifier": "seldonio/sklearnserver:1.11.2"
}
}
}

Using CLI

또한, curl과 같은 http client CLI 도구를 활용해서도 API 요청을 수행할 수 있습니다.

예를 들어, 다음과 같이 /predictions를 요청하면

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{ "data": { "ndarray": [[1,2,3,4]] } }'

아래와 같은 응답이 정상적으로 출력되는 것을 확인할 수 있습니다.

{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}
+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/seldon-mlflow/index.html b/docs/1.0/api-deployment/seldon-mlflow/index.html index c9bb701d..68944bd1 100644 --- a/docs/1.0/api-deployment/seldon-mlflow/index.html +++ b/docs/1.0/api-deployment/seldon-mlflow/index.html @@ -7,7 +7,7 @@ - + @@ -16,8 +16,8 @@ minio에 접근하기 위한 credentials는 다음과 같습니다.

apiVersion: v1
type: Opaque
kind: Secret
metadata:
name: seldon-init-container-secret
namespace: kubeflow-user-example-com
data:
AWS_ACCESS_KEY_ID: bWluaW8=
AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
USE_SSL: ZmFsc2U=

AWS_ACCESS_KEY_ID 의 입력값은 minio입니다. 다만 secret의 data 에는 base64로 인코딩된 값이 들어가야 하기 때문에, 실제로 입력되는 값은 다음을 수행한 후 나오는 값이어야 합니다.

data에 입력되어야 하는 값들은 다음과 같습니다.

인코딩은 다음 명령어를 통해서 할 수 있습니다.

echo -n minio | base64

그러면 다음과 같은 값이 출력됩니다.

bWluaW8=

인코딩을 전체 값에 대해서 진행하면 다음과 같이 됩니다.

  • AWS_ACCESS_KEY_ID: bWluaW8=
  • AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
  • AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
  • USE_SSL: ZmFsc2U=
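
참고로, 값을 하나씩 base64로 인코딩하는 대신 kubectl create secret generic 명령으로 같은 secret을 바로 만들 수도 있습니다. 아래는 이 문서의 예시 값을 그대로 사용한 명령입니다.

kubectl create secret generic seldon-init-container-secret \
-n kubeflow-user-example-com \
--from-literal=AWS_ACCESS_KEY_ID=minio \
--from-literal=AWS_SECRET_ACCESS_KEY=minio123 \
--from-literal=AWS_ENDPOINT_URL=http://minio-service.kubeflow.svc:9000 \
--from-literal=USE_SSL=false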

다음 명령어를 통해 secret을 생성할 수 있는 yaml파일을 생성합니다.

cat <<EOF > seldon-init-container-secret.yaml
apiVersion: v1
kind: Secret
metadata:
name: seldon-init-container-secret
namespace: kubeflow-user-example-com
type: Opaque
data:
AWS_ACCESS_KEY_ID: bWluaW8=
AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
USE_SSL: ZmFsc2U=
EOF

다음 명령어를 통해 secret을 생성합니다.

kubectl apply -f seldon-init-container-secret.yaml

정상적으로 수행되면 다음과 같이 출력됩니다.

secret/seldon-init-container-secret created
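
생성된 secret에 의도한 값이 들어갔는지 확인하고 싶다면, 다음과 같이 특정 키를 디코딩해 볼 수 있습니다. 아래는 예시입니다.

kubectl get secret seldon-init-container-secret -n kubeflow-user-example-com \
-o jsonpath='{.data.AWS_ENDPOINT_URL}' | base64 -d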

Seldon Core yaml

이제 Seldon Core를 생성하는 yaml파일을 작성합니다.

apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: seldon-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: model-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: model
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: model
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
children: []

이전에 작성한 Seldon Fields와 달라진 점은 크게 두 부분입니다. initContainer에 envFrom 필드가 추가되었으며 args의 주소가 s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc 로 바뀌었습니다.

args

앞서 args의 첫번째 array는 우리가 다운로드받을 모델의 경로라고 했습니다.
그럼 mlflow에 저장된 모델의 경로는 어떻게 알 수 있을까요?

다시 mlflow에 들어가서 run을 클릭하고 모델을 누르면 다음과 같이 확인할 수 있습니다.

seldon-mlflow-0.png

이렇게 확인된 경로를 입력하면 됩니다.
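
MLflow UI 대신 MLflow API로도 artifact 경로를 확인할 수 있습니다. 아래는 예시 스케치로, run_id는 이 페이지에 나온 값을 그대로 사용했고 tracking 서버 주소는 클러스터 내부에서 접근한다고 가정합니다.

from mlflow.tracking.client import MlflowClient

# Assumes in-cluster access; from outside the cluster use a port-forwarded
# or LoadBalancer address instead.
client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

run = client.get_run("74ba8e33994144f599e50b3be176cdb0")  # example run_id from this page
print(run.info.artifact_uri)  # e.g. s3://mlflow/mlflow/artifacts/0/<run_id>/artifacts
for artifact in client.list_artifacts(run.info.run_id):
    print(artifact.path)  # e.g. svc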

envFrom

minio에 접근해서 모델을 다운로드 받는 데 필요한 환경변수를 입력해주는 과정입니다. -앞서 만든 seldon-init-container-secret를 이용합니다.

API 생성

우선 위에서 정의한 스펙을 yaml 파일로 생성하겠습니다.

cat <<EOF > seldon-mlflow.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: seldon-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: model-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: model
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: model
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: xtype
type: STRING
value: "dataframe"
children: []
EOF

seldon pod을 생성합니다.

kubectl apply -f seldon-mlflow.yaml

정상적으로 수행되면 다음과 같이 출력됩니다.

seldondeployment.machinelearning.seldon.io/seldon-example created

이제 pod이 정상적으로 뜰 때까지 기다립니다.

kubectl get po -n kubeflow-user-example-com | grep seldon

다음과 비슷하게 출력되면 정상적으로 API를 생성했습니다.

seldon-example-model-0-model-5c949bd894-c5f28      3/3     Running     0          69s

CLI를 이용해 생성된 API에는 다음 request를 통해 실행을 확인할 수 있습니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/seldon-example/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{
"data": {
"ndarray": [
[
143.0,
0.0,
30.0,
30.0
]
],
"names": [
"sepal length (cm)",
"sepal width (cm)",
"petal length (cm)",
"petal width (cm)"
]
}
}'

정상적으로 실행될 경우 다음과 같은 결과를 받을 수 있습니다.

{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}
- +앞서 만든 seldon-init-container-secret를 이용합니다.

API 생성

우선 위에서 정의한 스펙을 yaml 파일로 생성하겠습니다.

cat <<EOF > seldon-mlflow.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
name: seldon-example
namespace: kubeflow-user-example-com
spec:
name: model
predictors:
- name: model

componentSpecs:
- spec:
volumes:
- name: model-provision-location
emptyDir: {}

initContainers:
- name: model-initializer
image: gcr.io/kfserving/storage-initializer:v0.4.0
args:
- "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
- "/mnt/models"
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
envFrom:
- secretRef:
name: seldon-init-container-secret

containers:
- name: model
image: ghcr.io/mlops-for-all/mlflowserver
volumeMounts:
- mountPath: /mnt/models
name: model-provision-location
readOnly: true
securityContext:
privileged: true
runAsUser: 0
runAsGroup: 0

graph:
name: model
type: MODEL
parameters:
- name: model_uri
type: STRING
value: "/mnt/models"
- name: xtype
type: STRING
value: "dataframe"
children: []
EOF

seldon pod을 생성합니다.

kubectl apply -f seldon-mlflow.yaml

정상적으로 수행되면 다음과 같이 출력됩니다.

seldondeployment.machinelearning.seldon.io/seldon-example created

이제 pod이 정상적으로 뜰 때까지 기다립니다.

kubectl get po -n kubeflow-user-example-com | grep seldon

다음과 비슷하게 출력되면 정상적으로 API를 생성했습니다.

seldon-example-model-0-model-5c949bd894-c5f28      3/3     Running     0          69s

CLI를 이용해 생성된 API에는 다음 request를 통해 실행을 확인할 수 있습니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/seldon-example/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{
"data": {
"ndarray": [
[
143.0,
0.0,
30.0,
30.0
]
],
"names": [
"sepal length (cm)",
"sepal width (cm)",
"petal length (cm)",
"petal width (cm)"
]
}
}'

정상적으로 실행될 경우 다음과 같은 결과를 받을 수 있습니다.

{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}
+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/seldon-pg/index.html b/docs/1.0/api-deployment/seldon-pg/index.html index 945448cb..dca3cae4 100644 --- a/docs/1.0/api-deployment/seldon-pg/index.html +++ b/docs/1.0/api-deployment/seldon-pg/index.html @@ -7,13 +7,13 @@ - +
-
버전: 1.0

3. Seldon Monitoring

Grafana & Prometheus

이제, 지난 페이지에서 생성했던 SeldonDeployment 로 API Request 를 반복적으로 수행해보고, 대시보드에 변화가 일어나는지 확인해봅니다.

대시보드

앞서 생성한 대시보드를 포트 포워딩합니다.

kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

API 요청

앞서 생성한 Seldon Deployment에 요청을 반복해서 보냅니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{ "data": { "ndarray": [[1,2,3,4]] } }'

그리고 그라파나 대시보드를 확인하면 다음과 같이 Global Request Rate 이 0 ops 에서 순간적으로 상승하는 것을 확인할 수 있습니다.

repeat-raise.png

이렇게 프로메테우스와 그라파나가 정상적으로 설치된 것을 확인할 수 있습니다.

- +
버전: 1.0

3. Seldon Monitoring

Grafana & Prometheus

이제, 지난 페이지에서 생성했던 SeldonDeployment 로 API Request 를 반복적으로 수행해보고, 대시보드에 변화가 일어나는지 확인해봅니다.

대시보드

앞서 생성한 대시보드를 포트 포워딩합니다.

kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

API 요청

앞서 생성한 Seldon Deployment에 요청을 반복해서 보냅니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
-H 'Content-Type: application/json' \
-d '{ "data": { "ndarray": [[1,2,3,4]] } }'

그리고 그라파나 대시보드를 확인하면 다음과 같이 Global Request Rate 이 0 ops 에서 순간적으로 상승하는 것을 확인할 수 있습니다.

repeat-raise.png

이렇게 프로메테우스와 그라파나가 정상적으로 설치된 것을 확인할 수 있습니다.

+ \ No newline at end of file diff --git a/docs/1.0/api-deployment/what-is-api-deployment/index.html b/docs/1.0/api-deployment/what-is-api-deployment/index.html index 82baedb1..70e5eb5d 100644 --- a/docs/1.0/api-deployment/what-is-api-deployment/index.html +++ b/docs/1.0/api-deployment/what-is-api-deployment/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ 쿠버네티스 환경에서 이러한 추론 엔진들을 사용하여 API Deployment를 한다면 어떤 작업이 필요할까요? 추론 엔진을 배포하기 위한 Deployment, 추론 요청을 보낼 Endpoint를 생성하기 위한 Service, 외부에서의 추론 요청을 추론 엔진으로 보내기 위한 Ingress 등 많은 쿠버네티스 리소스를 배포해 주어야 합니다. -이것 이외에도, 많은 추론 요청이 들어왔을 경우의 스케일 아웃(scale-out), 추론 엔진 상태에 대한 모니터링, 개선된 모델이 나왔을 경우 버전 업데이트 등 추론 엔진을 운영할 때의 요구사항은 한두 가지가 아닙니다.

이러한 많은 요구사항을 처리하기 위해 추론 엔진들을 쿠버네티스 환경 위에서 한 번 더 추상화한 Serving Framework들이 개발되었습니다.

개발된 Serving Framework들은 다음과 같은 오픈소스들이 있습니다.

모두의 MLOps에서는 Seldon Core를 사용하여 API Deployment를 하는 과정을 다루어 보도록 하겠습니다.

- +이것 이외에도, 많은 추론 요청이 들어왔을 경우의 스케일 아웃(scale-out), 추론 엔진 상태에 대한 모니터링, 개선된 모델이 나왔을 경우 버전 업데이트 등 추론 엔진을 운영할 때의 요구사항은 한두 가지가 아닙니다.

이러한 많은 요구사항을 처리하기 위해 추론 엔진들을 쿠버네티스 환경 위에서 한 번 더 추상화한 Serving Framework들이 개발되었습니다.

개발된 Serving Framework들은 다음과 같은 오픈소스들이 있습니다.

모두의 MLOps에서는 Seldon Core를 사용하여 API Deployment를 하는 과정을 다루어 보도록 하겠습니다.

+ \ No newline at end of file diff --git a/docs/1.0/appendix/metallb/index.html b/docs/1.0/appendix/metallb/index.html index fad90ef6..ce24ef6a 100644 --- a/docs/1.0/appendix/metallb/index.html +++ b/docs/1.0/appendix/metallb/index.html @@ -7,7 +7,7 @@ - + @@ -21,8 +21,8 @@ 추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

kubectl edit svc/istio-ingressgateway -n istio-system
spec:
clusterIP: 10.103.72.5
clusterIPs:
- 10.103.72.5
ipFamilies:
- IPv4
ipFamilyPolicy: SingleStack
ports:
- name: status-port
port: 15021
protocol: TCP
targetPort: 15021
- name: http2
port: 80
protocol: TCP
targetPort: 8080
- name: https
port: 443
protocol: TCP
targetPort: 8443
- name: tcp
port: 31400
protocol: TCP
targetPort: 31400
- name: tls
port: 15443
protocol: TCP
targetPort: 15443
selector:
app: istio-ingressgateway
istio: ingressgateway
sessionAffinity: None
type: LoadBalancer # Change ClusterIP to LoadBalancer
loadBalancerIP: 192.168.35.100 # Add IP
status:
loadBalancer: {}
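
kubectl edit 대신 아래와 같이 kubectl patch 명령으로도 같은 변경을 적용할 수 있습니다. IP 주소는 이 문서의 예시 값이며, 뒤에 나오는 minio-service, mlflow-server-service, seldon-core-analytics-grafana 서비스에도 같은 방식을 사용할 수 있습니다.

kubectl patch svc istio-ingressgateway -n istio-system \
--type merge \
-p '{"spec": {"type": "LoadBalancer", "loadBalancerIP": "192.168.35.100"}}'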

다시 확인을 해보면 External-IP 값이 192.168.35.100 인 것을 확인합니다.

kubectl get svc/istio-ingressgateway -n istio-system
NAME                   TYPE           CLUSTER-IP    EXTERNAL-IP      PORT(S)                                                                      AGE
istio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m

Web Browser 를 열어 http://192.168.35.100 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

login-after-istio-ingressgateway-setting.png

minio Dashboard

먼저 minio 의 Dashboard 를 제공하는 kubeflow 네임스페이스의 minio-service 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 밸런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

kubectl get svc/minio-service -n kubeflow

해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

NAME            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
minio-service ClusterIP 10.109.209.87 <none> 9000/TCP 5h14m

type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

kubectl edit svc/minio-service -n kubeflow
apiVersion: v1
kind: Service
metadata:
annotations:
kubectl.kubernetes.io/last-applied-configuration: |
{"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>
creationTimestamp: "2022-01-05T08:44:23Z"
labels:
application-crd-id: kubeflow-pipelines
name: minio-service
namespace: kubeflow
resourceVersion: "21120"
uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48
spec:
clusterIP: 10.109.209.87
clusterIPs:
- 10.109.209.87
ipFamilies:
- IPv4
ipFamilyPolicy: SingleStack
ports:
- name: http
port: 9000
protocol: TCP
targetPort: 9000
selector:
app: minio
application-crd-id: kubeflow-pipelines
sessionAffinity: None
type: LoadBalancer # Change ClusterIP to LoadBalancer
loadBalancerIP: 192.168.35.101 # Add IP
status:
loadBalancer: {}

다시 확인을 해보면 External-IP 값이 192.168.35.101 인 것을 확인할 수 있습니다.

kubectl get svc/minio-service -n kubeflow
NAME            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)          AGE
minio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m

Web Browser 를 열어 http://192.168.35.101:9000 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

login-after-minio-setting.png

mlflow Dashboard

먼저 mlflow 의 Dashboard 를 제공하는 mlflow-system 네임스페이스의 mlflow-server-service 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 밸런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

kubectl get svc/mlflow-server-service -n mlflow-system

해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

NAME                    TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
mlflow-server-service ClusterIP 10.111.173.209 <none> 5000/TCP 4m50s

type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

kubectl edit svc/mlflow-server-service -n mlflow-system
apiVersion: v1
kind: Service
metadata:
annotations:
meta.helm.sh/release-name: mlflow-server
meta.helm.sh/release-namespace: mlflow-system
creationTimestamp: "2022-01-07T04:00:19Z"
labels:
app.kubernetes.io/managed-by: Helm
name: mlflow-server-service
namespace: mlflow-system
resourceVersion: "276246"
uid: e5d39fb7-ad98-47e7-b512-f9c673055356
spec:
clusterIP: 10.111.173.209
clusterIPs:
- 10.111.173.209
ipFamilies:
- IPv4
ipFamilyPolicy: SingleStack
ports:
- port: 5000
protocol: TCP
targetPort: 5000
selector:
app.kubernetes.io/name: mlflow-server
sessionAffinity: None
type: LoadBalancer # Change ClusterIP to LoadBalancer
loadBalancerIP: 192.168.35.102 # Add IP
status:
loadBalancer: {}

다시 확인을 해보면 External-IP 값이 192.168.35.102 인 것을 확인할 수 있습니다.

kubectl get svc/mlflow-server-service -n mlflow-system
NAME                    TYPE           CLUSTER-IP       EXTERNAL-IP      PORT(S)          AGE
mlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s

Web Browser 를 열어 http://192.168.35.102:5000 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

login-after-mlflow-setting.png

Grafana Dashboard

먼저 Grafana 의 Dashboard 를 제공하는 seldon-system 네임스페이스의 seldon-core-analytics-grafana 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 밸런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

kubectl get svc/seldon-core-analytics-grafana -n seldon-system

해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

NAME                            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)   AGE
seldon-core-analytics-grafana ClusterIP 10.109.20.161 <none> 80/TCP 94s

type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

kubectl edit svc/seldon-core-analytics-grafana -n seldon-system
apiVersion: v1
kind: Service
metadata:
annotations:
meta.helm.sh/release-name: seldon-core-analytics
meta.helm.sh/release-namespace: seldon-system
creationTimestamp: "2022-01-07T04:16:47Z"
labels:
app.kubernetes.io/instance: seldon-core-analytics
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: grafana
app.kubernetes.io/version: 7.0.3
helm.sh/chart: grafana-5.1.4
name: seldon-core-analytics-grafana
namespace: seldon-system
resourceVersion: "280605"
uid: 75073b78-92ec-472c-b0d5-240038ea8fa5
spec:
clusterIP: 10.109.20.161
clusterIPs:
- 10.109.20.161
ipFamilies:
- IPv4
ipFamilyPolicy: SingleStack
ports:
- name: service
port: 80
protocol: TCP
targetPort: 3000
selector:
app.kubernetes.io/instance: seldon-core-analytics
app.kubernetes.io/name: grafana
sessionAffinity: None
type: LoadBalancer # Change ClusterIP to LoadBalancer
loadBalancerIP: 192.168.35.103 # Add IP
status:
loadBalancer: {}

다시 확인을 해보면 External-IP 값이 192.168.35.103 인 것을 확인할 수 있습니다.

kubectl get svc/seldon-core-analytics-grafana -n seldon-system
NAME                            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)        AGE
seldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s

Web Browser 를 열어 http://192.168.35.103:80 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

login-after-grafana-setting.png

버전: 1.0

1. Python 가상환경 설치

파이썬 가상환경

Python 환경을 사용하다 보면 여러 버전의 Python 환경을 사용하고 싶은 경우나, 여러 프로젝트별 패키지 버전을 따로 관리하고 싶은 경우가 발생합니다.

이처럼 Python 환경 혹은 Python Package 환경을 가상화하여 관리하는 것을 쉽게 도와주는 도구로는 pyenv, conda, virtualenv, venv 등이 존재합니다.

이 중 모두의 MLOps에서는 pyenv와 pyenv-virtualenv를 설치하는 방법을 다룹니다.
pyenv는 Python 버전을 관리하는 것을 도와주며, pyenv-virtualenv는 pyenv의 plugin으로써 파이썬 패키지 환경을 관리하는 것을 도와줍니다.

pyenv 설치

Prerequisites

운영 체제별로 Prerequisites가 다릅니다. 다음 페이지를 참고하여 필수 패키지들을 설치해주시기 바랍니다.

설치 - macOS

  1. pyenv, pyenv-virtualenv 설치
brew update
brew install pyenv
brew install pyenv-virtualenv
  1. pyenv 설정

macOS의 경우 카탈리나 버전 이후 기본 shell이 zsh로 변경되었기 때문에 zsh을 사용하는 경우를 가정하였습니다.

echo 'eval "$(pyenv init -)"' >> ~/.zshrc
echo 'eval "$(pyenv virtualenv-init -)"' >> ~/.zshrc
source ~/.zshrc

pyenv 명령이 정상적으로 수행되는지 확인합니다.

pyenv --help
$ pyenv --help
Usage: pyenv <command> [<args>]

Some useful pyenv commands are:
--version Display the version of pyenv
activate Activate virtual environment
commands List all available pyenv commands
deactivate Deactivate virtual environment
exec Run an executable with the selected Python version
global Set or show the global Python version(s)
help Display help for a command
hooks List hook scripts for a given pyenv command
init Configure the shell environment for pyenv
install Install a Python version using python-build
local Set or show the local application-specific Python version(s)
prefix Display prefix for a Python version
rehash Rehash pyenv shims (run this after installing executables)
root Display the root directory where versions and shims are kept
shell Set or show the shell-specific Python version
shims List existing pyenv shims
uninstall Uninstall a specific Python version
version Show the current Python version(s) and its origin
version-file Detect the file that sets the current pyenv version
version-name Show the current Python version
version-origin Explain how the current Python version is set
versions List all Python versions available to pyenv
virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
virtualenv-delete Uninstall a specific Python virtualenv
virtualenv-init Configure the shell environment for pyenv-virtualenv
virtualenv-prefix Display real_prefix for a Python virtualenv version
virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
whence List all Python versions that contain the given executable
which Display the full path to an executable

See `pyenv help <command>' for information on a specific command.
For full documentation, see: https://github.com/pyenv/pyenv#readme

설치 - Ubuntu

  1. pyenv, pyenv-virtualenv 설치
curl https://pyenv.run | bash

다음과 같은 내용이 출력되면 정상적으로 설치된 것을 의미합니다.

  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239
Cloning into '/home/mlops/.pyenv'...
...
중략...
...
remote: Enumerating objects: 10, done.
remote: Counting objects: 100% (10/10), done.
remote: Compressing objects: 100% (6/6), done.
remote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0
Unpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.

WARNING: seems you still have not added 'pyenv' to the load path.


# See the README for instructions on how to set up
# your shell environment for Pyenv.

# Load pyenv-virtualenv automatically by adding
# the following to ~/.bashrc:

eval "$(pyenv virtualenv-init -)"

  1. pyenv 설정

기본 shell로 bash shell을 사용하는 경우를 가정하였습니다. bash에서 pyenv와 pyenv-virtualenv 를 사용할 수 있도록 설정합니다.

sudo vi ~/.bashrc

다음 문자열을 입력한 후 저장합니다.

export PATH="$HOME/.pyenv/bin:$PATH"
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"

shell을 restart 합니다.

exec $SHELL

pyenv 명령이 정상적으로 수행되는지 확인합니다.

pyenv --help

다음과 같은 메시지가 출력되면 정상적으로 설정된 것을 의미합니다.

$ pyenv
pyenv 2.2.2
Usage: pyenv <command> [<args>]

Some useful pyenv commands are:
--version Display the version of pyenv
activate Activate virtual environment
commands List all available pyenv commands
deactivate Deactivate virtual environment
doctor Verify pyenv installation and development tools to build pythons.
exec Run an executable with the selected Python version
global Set or show the global Python version(s)
help Display help for a command
hooks List hook scripts for a given pyenv command
init Configure the shell environment for pyenv
install Install a Python version using python-build
local Set or show the local application-specific Python version(s)
prefix Display prefix for a Python version
rehash Rehash pyenv shims (run this after installing executables)
root Display the root directory where versions and shims are kept
shell Set or show the shell-specific Python version
shims List existing pyenv shims
uninstall Uninstall a specific Python version
version Show the current Python version(s) and its origin
version-file Detect the file that sets the current pyenv version
version-name Show the current Python version
version-origin Explain how the current Python version is set
versions List all Python versions available to pyenv
virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
virtualenv-delete Uninstall a specific Python virtualenv
virtualenv-init Configure the shell environment for pyenv-virtualenv
virtualenv-prefix Display real_prefix for a Python virtualenv version
virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
whence List all Python versions that contain the given executable
which Display the full path to an executable

See `pyenv help <command>' for information on a specific command.
For full documentation, see: https://github.com/pyenv/pyenv#readme

pyenv 사용

Python 버전 설치

pyenv install <Python-Version> 명령을 통해 원하는 파이썬 버전을 설치할 수 있습니다. 이번 페이지에서는 예시로 kubeflow에서 기본으로 사용하는 파이썬 3.7.12 버전을 설치하겠습니다.

pyenv install 3.7.12

정상적으로 설치되면 다음과 같은 메시지가 출력됩니다.

$ pyenv install 3.7.12
Downloading Python-3.7.12.tar.xz...
-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz
Installing Python-3.7.12...
patching file Doc/library/ctypes.rst
patching file Lib/test/test_unicode.py
patching file Modules/_ctypes/_ctypes.c
patching file Modules/_ctypes/callproc.c
patching file Modules/_ctypes/ctypes.h
patching file setup.py
patching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'
patching file Modules/_decimal/libmpdec/mpdecimal.h
Installed Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12

Python 가상환경 생성

pyenv virtualenv <Installed-Python-Version> <가상환경-이름> 명령을 통해 원하는 파이썬 버전의 파이썬 가상환경을 생성할 수 있습니다.

예시로 Python 3.7.12 버전의 demo라는 이름의 Python 가상환경을 생성하겠습니다.

pyenv virtualenv 3.7.12 demo
$ pyenv virtualenv 3.7.12 demo
Looking in links: /tmp/tmpffqys0gv
Requirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)
Requirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)

Python 가상환경 사용

pyenv activate <가상환경 이름> 명령을 통해 위와 같은 방식으로 생성한 가상환경을 사용할 수 있습니다.

예시로는 demo라는 이름의 Python 가상환경을 사용하겠습니다.

pyenv activate demo

다음과 같이 현재 가상환경의 정보가 shell의 맨 앞에 출력되는 것을 확인할 수 있습니다.

Before

mlops@ubuntu:~$ pyenv activate demo

After

pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.
(demo) mlops@ubuntu:~$

Python 가상환경 비활성화

source deactivate 명령을 통해 현재 사용 중인 가상환경을 비활성화할 수 있습니다.

source deactivate

Before

(demo) mlops@ubuntu:~$ source deactivate

After

mlops@ubuntu:~$ 
버전: 1.0

다루지 못한 것들

MLOps Component

MLOps Concepts에서 다루었던 컴포넌트를 도식화하면 다음과 같습니다.

open-stacks-0.png

이 중 모두의 MLOps 에서 다룬 기술 스택들은 다음과 같습니다.

open-stacks-1.png

보시는 것처럼 아직 우리가 다루지 못한 많은 MLOps 컴포넌트들이 있습니다.

시간 관계상 이번에 모두 다루지는 못했지만, 만약 필요하다면 다음과 같은 오픈소스들을 먼저 참고해보면 좋을 것 같습니다.

open-stacks-2.png

세부 내용은 다음과 같습니다.

Mgmt.                         Component                       Open Source
Data Mgmt.                    Collection                      Kafka
                              Validation                      Beam
                              Feature Store                   Flink
ML Model Dev. & Experiment    Modeling                        Jupyter
                              Analysis & Experiment Mgmt.     MLflow
                              HPO Tuning & AutoML             Katib
Deploy Mgmt.                  Serving Framework               Seldon Core
                              A/B Test                        Iter8
                              Monitoring                      Grafana, Prometheus
Process Mgmt.                 Pipeline                        Kubeflow
                              CI/CD                           Github Action
                              Continuous Training             Argo Events
Platform Mgmt.                Configuration Mgmt.             Consul
                              Code Version Mgmt.              Github, Minio
                              Logging                         (EFK) Elasticsearch, Fluentd, Kibana
                              Resource Mgmt.                  Kubernetes
버전: 1.0

3. Components of MLOps

Practitioners guide to MLOps

2021년 5월에 발표된 구글의 white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning에서는 MLOps의 핵심 기능들로 다음과 같은 것들을 언급하였습니다.

mlops-component

각 기능이 어떤 역할을 하는지 살펴보겠습니다.

1. Experimentation

실험(Experimentation)은 머신러닝 엔지니어들이 데이터를 분석하고, 프로토타입 모델을 만들며 학습 기능을 구현할 수 있도록 하는 다음과 같은 기능을 제공합니다.

  • 깃(Git)과 같은 버전 컨트롤 도구와 통합된 노트북(Jupyter Notebook) 환경 제공
  • 사용한 데이터, 하이퍼 파라미터, 평가 지표를 포함한 실험 추적 기능 제공
  • 데이터와 모델에 대한 분석 및 시각화 기능 제공

2. Data Processing

데이터 처리(Data Processing)는 머신러닝 모델 개발 단계, 지속적인 학습(Continuous Training) 단계, 그리고 API 배포(API Deployment) 단계에서 많은 양의 데이터를 사용할 수 있게 해 주는 다음과 같은 기능을 제공합니다.

  • 다양한 데이터 소스와 서비스에 호환되는 데이터 커넥터(connector) 기능 제공
  • 다양한 형태의 데이터와 호환되는 데이터 인코더(encoder) & 디코더(decoder) 기능 제공
  • 다양한 형태의 데이터에 대한 데이터 변환과 피처 엔지니어링(feature engineering) 기능 제공
  • 학습과 서빙을 위한 확장 가능한 배치, 스트림 데이터 처리 기능 제공

3. Model training

모델 학습(Model training)은 모델 학습을 위한 알고리즘을 효율적으로 실행시켜주는 다음과 같은 기능을 제공합니다.

  • ML 프레임워크의 실행을 위한 환경 제공
  • 다수의 GPU / 분산 학습 사용을 위한 분산 학습 환경 제공
  • 하이퍼 파라미터 튜닝과 최적화 기능 제공

4. Model evaluation

모델 평가(Model evaluation)는 실험 환경과 상용 환경에서 동작하는 모델의 성능을 관찰할 수 있는 다음과 같은 기능을 제공합니다.

  • 평가 데이터에 대한 모델 성능 평가 기능
  • 서로 다른 지속 학습 실행 결과에 대한 예측 성능 추적
  • 서로 다른 모델의 성능 비교와 시각화
  • 해석할 수 있는 AI 기술을 이용한 모델 출력 해석 기능 제공

5. Model serving

모델 서빙(Model serving)은 상용 환경에 모델을 배포하고 서빙하기 위한 다음과 같은 기능들을 제공합니다.

  • 저 지연 추론과 고가용성 추론 기능 제공
  • 다양한 ML 모델 서빙 프레임워크 지원(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost 등)
  • 복잡한 형태의 추론 루틴 기능 제공, 예를 들어 전처리(preprocess) 또는 후처리(postprocess) 기능과 최종 결과를 위해 다수의 모델이 사용되는 경우를 말합니다.
  • 순간적으로 치솟는 추론 요청을 처리하기 위한 오토 스케일링(autoscaling) 기능 제공
  • 추론 요청과 추론 결과에 대한 로깅 기능 제공

6. Online experimentation

온라인 실험(Online experimentation)은 새로운 모델이 생성되었을 때, 이 모델을 배포하면 어느 정도의 성능을 보일 것인지 검증하는 기능을 제공합니다. 이 기능은 새 모델을 배포하는 것까지 연동하기 위해 모델 저장소(Model Registry)와 연동되어야 합니다.

  • 카나리(canary) & 섀도(shadow) 배포 기능 제공
  • A/B 테스트 기능 제공
  • 멀티 암드 밴딧(Multi-armed bandit) 테스트 기능 제공

7. Model Monitoring

모델 모니터링(Model Monitoring)은 상용 환경에 배포된 모델이 정상적으로 동작하고 있는지를 모니터링하는 기능을 제공합니다. 예를 들어 모델의 성능이 떨어져 업데이트가 필요한지에 대한 정보 등을 제공합니다.

8. ML Pipeline

머신러닝 파이프라인(ML Pipeline)은 상용 환경에서 복잡한 ML 학습과 추론 작업을 구성하고 제어하고 자동화하기 위한 다음과 같은 기능을 제공합니다.

  • 다양한 이벤트 소스를 통한 파이프라인 실행 기능
  • 파이프라인 파라미터와 생성되는 산출물 관리를 위한 머신러닝 메타데이터 추적과 연동 기능
  • 일반적인 머신러닝 작업을 위한 내장 컴포넌트 지원과 사용자가 직접 구현한 컴포넌트에 대한 지원 기능
  • 서로 다른 실행 환경 제공 기능

9. Model Registry

모델 저장소(Model Registry)는 머신러닝 모델의 생명 주기(Lifecycle)를 중앙 저장소에서 관리할 수 있게 해 주는 기능을 제공합니다.

  • 학습된 모델 그리고 배포된 모델에 대한 등록, 추적, 버저닝 기능 제공
  • 배포를 위해 필요한 데이터와 런타임 패키지들에 대한 정보 저장 기능

10. Dataset and Feature Repository

  • 데이터에 대한 공유, 검색, 재사용 그리고 버전 관리 기능
  • 이벤트 스트리밍 및 온라인 추론 작업에 대한 실시간 처리 및 저 지연 서빙 기능
  • 사진, 텍스트, 테이블 형태의 데이터와 같은 다양한 형태의 데이터 지원 기능

11. ML Metadata and Artifact Tracking

MLOps의 각 단계에서는 다양한 형태의 산출물들이 생성됩니다. ML 메타데이터는 이런 산출물들에 대한 정보를 의미합니다. ML 메타데이터와 산출물 관리는 산출물의 위치, 타입, 속성, 그리고 관련된 실험(experiment)에 대한 정보를 관리하기 위해 다음과 같은 기능들을 제공합니다.

  • ML 산출물에 대한 히스토리 관리 기능
  • 실험과 파이프라인 파라미터 설정에 대한 추적, 공유 기능
  • ML 산출물에 대한 저장, 접근, 시각화, 다운로드 기능 제공
  • 다른 MLOps 기능과의 통합 기능 제공
docs/1.0/introduction/intro

이 말은 머신러닝팀과 운영팀 사이에 문제가 발생했다는 의미입니다. 그럼 왜 머신러닝팀과 운영팀에는 문제가 발생했을까요? 두 팀 간의 문제를 알아보기 위해서 추천시스템을 예시로 알아보겠습니다.

Rule Based

처음 추천시스템을 만드는 경우 간단한 규칙을 기반으로 아이템을 추천합니다. 예를 들어서 1주일간 판매량이 가장 많은 순서대로 보여주는 방식을 이용합니다. 이 방식으로 모델을 정한다면 특별한 이유가 없는 이상 모델의 수정이 필요 없습니다.

Machine Learning

서비스의 규모가 조금 커지고 로그 데이터가 많이 쌓인다면 이를 이용해 아이템 기반 혹은 유저 기반의 머신러닝 모델을 생성합니다. 이때 모델은 정해진 주기에 따라 모델을 재학습 후 재배포합니다.

Deep Learning

개인화 추천에 대한 요구가 더 커지고 더 좋은 성능을 내는 모델이 필요해질 경우 딥러닝을 이용한 모델을 개발하기 시작합니다. 이때 만드는 모델은 머신러닝과 같이 정해진 주기에 따라 재학습 후 재배포합니다.

graph

위에서 설명한 것을 x축은 모델의 복잡도, y축은 모델의 성능으로 두고 그래프로 표현한다면, 다음과 같이 복잡도가 올라갈 때 모델의 성능이 올라가는 상승 관계를 갖습니다. 보통 머신러닝에서 딥러닝으로 넘어갈 때쯤 머신러닝 팀이 새로 생기게 됩니다.

만약 관리해야 할 모델이 적다면 서로 협업을 통해서 충분히 해결할 수 있지만 개발해야 할 모델이 많아진다면 DevOps의 경우와 같이 사일로 현상이 나타나게 됩니다.

DevOps의 목표와 맞춰서 생각해보면 MLOps의 목표는 개발한 모델이 정상적으로 배포될 수 있는지 테스트하는 것입니다. 개발팀에서 개발한 기능이 정상적으로 배포될 수 있는지 확인하는 것이 DevOps의 목표였다면, MLOps의 목표는 머신러닝 팀에서 개발한 모델이 정상적으로 배포될 수 있는지 확인하는 것입니다.

2) ML -> Ops

하지만 최근 나오고 있는 MLOps 관련 제품과 설명을 보면 꼭 앞에서 설명한 목표만을 대상으로 하고 있지 않습니다. 어떤 경우에는 머신러닝 팀에서 만든 모델을 이용해 직접 운영을 할 수 있도록 도와주려고 합니다. 이러한 니즈는 최근 머신러닝 프로젝트가 진행되는 과정에서 알 수 있습니다.

추천시스템의 경우 운영에서 간단한 모델부터 시작해 운영할 수 있었습니다. 하지만 자연어, 이미지와 같은 곳에서는 규칙 기반의 모델보다는 딥러닝을 이용해 주어진 태스크를 해결할 수 있는지 검증(POC)를 선행하는 경우가 많습니다. 검증이 끝난 프로젝트는 이제 서비스를 위한 운영 환경을 개발하기 시작합니다. 하지만 머신러닝 팀 내의 자체 역량으로는 이 문제를 해결하기 쉽지 않습니다. 이를 해결하기 위해서 MLOps가 필요한 경우도 있습니다.

3) 결론

요약하자면 MLOps는 두 가지 목표가 있습니다. 앞에서 설명한 MLOps는 ML+Ops 로 두 팀의 생산성 향상을 위한 것이었습니다. 반면, 뒤에서 설명한 것은 ML->Ops 로 머신러닝 팀에서 직접 운영을 할 수 있도록 도와주는 것을 말합니다.


docs/1.0/introduction/levels

예를 들어서 어떤 기능에서는 파이썬 3.7을 쓰고 어떤 기능에서는 파이썬 3.8을 쓴다면 다음과 같은 상황을 자주 목격할 수 있습니다.

이러한 상황이 일어나는 이유는 머신러닝 모델의 특성에 있습니다. 학습된 머신러닝 모델이 동작하기 위해서는 3가지가 필요합니다.

  1. 파이썬 코드
  2. 학습된 가중치
  3. 환경 (패키지, 버전 등)

만약 이 3가지 중 한 가지라도 전달이 잘못된다면 모델이 동작하지 않거나 예상하지 못한 예측을 할 수 있습니다. 그런데 많은 경우 환경이 일치하지 않아서 동작하지 않는 경우가 많습니다. 머신러닝은 다양한 오픈소스를 사용하는데, 오픈소스는 특성상 어떤 버전을 쓰는지에 따라서 같은 함수라도 결과가 다를 수 있습니다.

이러한 문제는 서비스 초기에는 관리할 모델이 많지 않기 때문에 금방 해결할 수 있습니다. 하지만 관리하는 기능들이 많아지고 서로 소통에 어려움을 겪게 된다면 성능이 더 좋은 모델을 빠르게 배포할 수 없게 됩니다.

1단계: ML 파이프라인 자동화

Pipeline

level-1-pipeline

그래서 MLOps에서는 “파이프라인(Pipeline)”을 이용해 이러한 문제를 방지하고자 했습니다. MLOps의 파이프라인은 도커와 같은 컨테이너를 이용해 머신러닝 엔지니어가 모델 개발에 사용한 것과 동일한 환경으로 동작되는 것을 보장합니다. 이를 통해서 환경이 달라서 모델이 동작하지 않는 상황을 방지합니다.

그런데 파이프라인은 범용적인 용어로 여러 다양한 태스크에서 사용됩니다. 머신러닝 엔지니어가 작성하는 파이프라인의 역할은 무엇일까요?
머신러닝 엔지니어가 작성하는 파이프라인은 학습된 모델을 생산합니다. 그래서 파이프라인 대신 학습 파이프라인(Training Pipeline)이라고 부르는 것이 더 정확하다고 볼 수 있습니다.
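
예를 들어 학습 파이프라인은 다음과 같은 형태로 생각해 볼 수 있습니다. 아래는 뒤의 Kubeflow 장에서 다루는 kfp SDK를 사용한다고 가정한 최소한의 개념 스케치이며, 함수 이름과 반환 값은 설명을 위해 임의로 정한 것입니다.

from kfp.components import create_component_from_func
from kfp.dsl import pipeline


def train() -> str:
    # 실제로는 데이터 로드와 모델 학습 코드가 들어갈 자리입니다.
    return "trained-model"


# 함수를 독립된 컨테이너 위에서 실행되는 컴포넌트로 변환합니다.
train_op = create_component_from_func(train, base_image="python:3.7")


@pipeline(name="training-pipeline")
def training_pipeline():
    # 학습 파이프라인은 학습된 모델(산출물)을 생산하는 것이 목적입니다.
    train_op()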

Continuous Training

level-1-ct.png

그리고 Continuous Training(CT) 개념이 추가됩니다. 그렇다면 CT는 왜 필요할까요?

Auto Retrain

Real World의 데이터에는 분포가 계속해서 변하는 Data Shift라는 특징이 있습니다. 그래서 과거에 학습한 모델은 시간이 지남에 따라 성능이 저하되는 문제가 있습니다. 이 문제를 해결하는 가장 간단하고 효과적인 해결책은 바로 최근 데이터를 이용해 모델을 재학습하는 것입니다. 변화된 데이터 분포에 맞춰서 모델을 재학습하면 다시 준수한 성능을 낼 수 있습니다.

Auto Deploy

하지만 제조업과 같이 한 공장에서 여러 레시피를 처리하는 경우 무조건 재학습을 하는 것이 좋지 않을 수도 있습니다. Blind Spot이 대표적인 예입니다.

예를 들어서 자동차 생산 라인에서 모델 A에 대해서 모델을 만들고 이를 이용해 예측을 진행하고 있었습니다. 만약 전혀 다른 모델 B가 들어오면 이전에 보지 못한 데이터 패턴이기 때문에 모델 B에 대해서 새로운 모델을 학습합니다.

이제 모델 B에 대해서 모델을 만들었기 때문에 모델은 예측을 진행할 것입니다. 그런데 만약 데이터가 다시 모델 A로 바뀐다면 어떻게 할까요?
만약 Retraining 규칙만 있다면 다시 모델 A에 대해서 새로운 모델을 학습하게 됩니다. 그런데 머신러닝 모델이 충분한 성능을 보이기 위해서는 충분한 양의 데이터가 모여야 합니다. Blind Spot이란 이렇게 데이터를 모으기 위해서 모델이 동작하지 않는 구간을 말합니다.

이러한 Blind Spot을 해결하는 방법은 간단할 수 있습니다. 모델 A에 대한 모델이 과거에 있었는지 확인하고, 만약 있었다면 새로운 모델을 바로 학습하기보다는 이전 모델을 이용해 다시 예측을 하면 이런 Blind Spot을 해결할 수 있습니다. 이렇게 모델 정보와 같은 메타데이터를 이용해 배포할 모델을 자동으로 교체해 주는 것을 Auto Deploy라고 합니다.
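
위에서 설명한 판단 로직을 개념적으로 표현하면 다음과 같습니다. 아래는 model_registry라는 가상의 저장소와 임의의 함수 이름을 가정한 파이썬 스케치일 뿐, 특정 도구의 실제 구현은 아닙니다.

def train_new_model(pattern_id):
    # 실제로는 해당 패턴의 데이터를 충분히 모은 뒤 학습하는 로직이 들어갑니다. (Auto Retrain)
    return f"model-for-{pattern_id}"


def resolve_model(pattern_id, model_registry):
    if pattern_id in model_registry:
        # 과거에 같은 패턴으로 학습한 모델이 있으면 그대로 다시 배포하여 Blind Spot을 줄입니다. (Auto Deploy)
        return model_registry[pattern_id]
    model_registry[pattern_id] = train_new_model(pattern_id)
    return model_registry[pattern_id]


registry = {}
print(resolve_model("A", registry))  # 처음 보는 패턴이므로 새로 학습
print(resolve_model("B", registry))  # 처음 보는 패턴이므로 새로 학습
print(resolve_model("A", registry))  # 기존 모델을 재사용하여 바로 배포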

정리하자면 CT를 위해서는 Auto Retraining과 Auto Deploy 두 가지 기능이 필요합니다. 둘은 서로의 단점을 보완해 계속해서 모델의 성능을 유지할 수 있게 합니다.

2단계: CI/CD 파이프라인의 자동화

level-2

2단계의 제목은 CI와 CD의 자동화 입니다. DevOps에서의 CI/CD의 대상은 소스 코드입니다. 그렇다면 MLOps는 어떤 것이 CI/CD의 대상일까요?

MLOps의 CI/CD 대상 또한 소스 코드인 것은 맞지만 조금 더 엄밀히 정의하자면 학습 파이프라인이라고 볼 수 있습니다.

그래서 모델을 학습하는데 있어서 영향이 있는 변화에 대해서 실제로 모델이 정상적으로 학습이 되는지 (CI), 학습된 모델이 정상적으로 동작하는지 (CD)를 확인해야 합니다. 그래서 학습을 하는 코드에 직접적인 수정이 있는 경우에는 CI/CD를 진행해야 합니다.

코드 외에도 사용하는 패키지의 버전, 파이썬의 버전 변경도 CI/CD의 대상입니다. 많은 경우 머신 러닝은 오픈 소스를 이용합니다. 하지만 오픈 소스는 그 특성상 버전이 바뀌었을 때 함수의 내부 로직이 변하는 경우도 있습니다. 물론 어느 정도 버전이 올라 갈 때 이와 관련된 알림을 주지만 한 번에 버전이 크게 바뀐다면 이러한 변화를 모를 수도 있습니다.
그래서 사용하는 패키지의 버전이 변하는 경우에도 CI/CD를 통해 정상적으로 모델이 학습, 동작하는지 확인을 해야 합니다.


docs/1.0/introduction/why_kubernetes

만약, 특정 서비스가 장애를 일으켰다면 여러 컨테이너의 로그를 확인해가며 문제를 파악해야 합니다.
또한, 특정 클러스터나 특정 컨테이너에 작업이 몰리지 않도록 스케줄링(Scheduling)하고 로드 밸런싱(Load Balancing)하며, 스케일링(Scaling)하는 등의 수많은 작업을 담당해야 합니다. 이렇게 수많은 컨테이너의 상태를 지속해서 관리하고 운영하는 과정을 조금이나마 쉽게, 자동으로 할 수 있는 기능을 제공해주는 소프트웨어가 바로 컨테이너 오케스트레이션 시스템입니다.

머신러닝에서는 어떻게 쓰일 수 있을까요?
예를 들어서 GPU가 있어야 하는 딥러닝 학습 코드가 패키징된 컨테이너는 사용 가능한 GPU가 있는 클러스터에서 수행하고, 많은 메모리를 필요로 하는 데이터 전처리 코드가 패키징된 컨테이너는 메모리의 여유가 많은 클러스터에서 수행하고, 학습 중에 클러스터에 문제가 생기면 자동으로 같은 컨테이너를 다른 클러스터로 이동시키고 다시 학습을 진행하는 등의 작업을 사람이 일일이 수행하지 않고, 자동으로 관리하는 시스템을 개발한 뒤 맡기는 것입니다.

집필을 하는 2022년을 기준으로 쿠버네티스는 컨테이너 오케스트레이션 시스템의 사실상의 표준(De facto standard)입니다.

CNCF에서 2018년 발표한 Survey 에 따르면 다음 그림과 같이 이미 두각을 나타내고 있었으며, 2019년 발표한 Survey에 따르면 그중 78%가 상용 수준(Production Level)에서 사용하고 있다는 것을 알 수 있습니다.

k8s-graph

쿠버네티스 생태계가 이처럼 커지게 된 이유에는 여러 가지 이유가 있습니다. 하지만 도커와 마찬가지로 쿠버네티스 역시 머신러닝 기반의 서비스에서만 사용하는 기술이 아니기에, 자세히 다루기에는 상당히 많은 양의 기술적인 내용을 다루어야 하므로 이번 모두의 MLOps에서는 자세한 내용은 생략할 예정입니다.

다만, 모두의 MLOps에서 앞으로 다룰 내용은 도커와 쿠버네티스에 대한 내용을 어느 정도 알고 계신 분들을 대상으로 작성하였습니다. 따라서 쿠버네티스에 대해 익숙하지 않으신 분들은 다음 쿠버네티스 공식 문서, subicura 님의 개인 블로그 글 등의 쉽고 자세한 자료들을 먼저 참고해주시는 것을 권장합니다.

버전: 1.0

6. Kubeflow Pipeline 관련

Central Dashboard의 왼쪽 탭의 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions 페이지들에서는 Kubeflow Pipeline과 Pipeline의 실행 그리고 Pipeline Run의 결과를 관리합니다.

left-tabs

Kubeflow Pipeline이 모두의 MLOps에서 Kubeflow를 사용하는 주된 이유이며, Kubeflow Pipeline을 만드는 방법, 실행하는 방법, 결과를 확인하는 방법 등 자세한 내용은 3.Kubeflow에서 다룹니다.

버전: 1.0

5. Experiments(AutoML)

다음으로는 Central Dashboard의 왼쪽 탭의 Experiments(AutoML)을 클릭해보겠습니다.

left-tabs

automl

Experiments(AutoML) 페이지는 Kubeflow에서 Hyperparameter Tuning과 Neural Architecture Search를 통한 AutoML을 담당하는 Katib를 관리할 수 있는 페이지입니다.

Katib와 Experiments(AutoML)에 대한 사용법은 모두의 MLOps v1.0에서는 다루지 않으며, v2.0에 추가될 예정입니다.

버전: 1.0

1. Central Dashboard

Kubeflow 설치를 완료하면, 다음 커맨드를 통해 대시보드에 접속할 수 있습니다.

kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80

after-login

Central Dashboard는 Kubeflow에서 제공하는 모든 기능을 통합하여 제공하는 UI입니다. Central Dashboard에서 제공하는 기능은 크게 왼쪽의 탭을 기준으로 구분할 수 있습니다.

left-tabs

  • Home
  • Notebooks
  • Tensorboards
  • Volumes
  • Models
  • Experiments(AutoML)
  • Experiments(KFP)
  • Pipelines
  • Runs
  • Recurring Runs
  • Artifacts
  • Executions

그럼 이제 기능별 간단한 사용법을 알아보겠습니다.

버전: 1.0

2. Notebooks

노트북 서버(Notebook Server) 생성하기

다음 Central Dashboard의 왼쪽 탭의 Notebooks를 클릭해보겠습니다.

left-tabs

다음과 같은 화면을 볼 수 있습니다.

Notebooks 탭은 JupyterHub와 비슷하게 유저별로 jupyter notebook 및 code server 환경(이하 노트북 서버)을 독립적으로 생성하고 접속할 수 있는 페이지입니다.

notebook-home

오른쪽 위의 + NEW NOTEBOOK 버튼을 클릭합니다.

new-notebook

아래와 같은 화면이 나타나면, 이제 생성할 노트북 서버의 스펙(Spec)을 명시하여 생성합니다.

create

각 스펙에 대한 자세한 내용은 아래와 같습니다.
  • name:
    • 노트북 서버를 구분할 수 있는 이름으로 생성합니다.
  • namespace :
    • 따로 변경할 수 없습니다. (현재 로그인한 user 계정의 namespace이 자동으로 지정되어 있습니다.)
  • Image:
    • sklearn, pytorch, tensorflow 등의 파이썬 패키지가 미리 설치된 jupyter lab 이미지 중 사용할 이미지를 선택합니다.
      • 노트북 서버 내에서 GPU를 사용하여 tensorflow-cuda, pytorch-cuda 등의 이미지를 사용하는 경우, 하단의 GPUs 부분을 확인하시기 바랍니다.
    • 추가적인 패키지나 소스코드 등을 포함한 커스텀(Custom) 노트북 서버를 사용하고 싶은 경우에는 커스텀 이미지(Custom Image)를 만들고 배포 후 사용할 수도 있습니다.
  • CPU / RAM
    • 필요한 자원 사용량을 입력합니다.
      • cpu : core 단위
        • 가상 core 개수 단위를 의미하며, int 형식이 아닌 1.5, 2.7 등의 float 형식도 입력할 수 있습니다.
      • memory : Gi 단위
  • GPUs
    • 주피터 노트북에 할당할 GPU 개수를 입력합니다.
      • None
        • GPU 자원이 필요하지 않은 상황
      • 1, 2, 4
        • GPU 1, 2, 4 개 할당
    • GPU Vendor
      • 앞의 (Optional) Setup GPU 를 따라 nvidia gpu plugin을 설치하였다면 NVIDIA를 선택합니다.
  • Workspace Volume
    • 노트북 서버 내에서 필요한 만큼의 디스크 용량을 입력합니다.
    • Type 과 Name 은 변경하지 않고, 디스크 용량을 늘리고 싶거나 AccessMode 를 변경하고 싶을 때에만 변경해서 사용하시면 됩니다.
      • "Don't use Persistent Storage for User's home" 체크박스는 노트북 서버의 작업 내용을 저장하지 않아도 상관없을 때에만 클릭합니다. 일반적으로는 누르지 않는 것을 권장합니다.
      • 기존에 미리 생성해두었던 PVC를 사용하고 싶을 때에는, Type을 "Existing" 으로 입력하여 해당 PVC의 이름을 입력하여 사용하시면 됩니다.
  • Data Volumes
    • 추가적인 스토리지 자원이 필요하다면 "+ ADD VOLUME" 버튼을 클릭하여 생성할 수 있습니다.
  • Configurations, Affinity/Tolerations, Miscellaneous Settings
    • 일반적으로는 필요하지 않으므로 모두의 MLOps에서는 자세한 설명을 생략합니다.

모두 정상적으로 입력하였다면 하단의 LAUNCH 버튼이 활성화되며, 버튼을 클릭하면 노트북 서버 생성이 시작됩니다.

creating

생성 후 아래와 같이 Status 가 초록색 체크 표시 아이콘으로 변하며, CONNECT 버튼이 활성화됩니다.

created


노트북 서버 접속하기

CONNECT 버튼을 클릭하면 브라우저에 새 창이 열리며, 다음과 같은 화면이 보입니다.

notebook-access

Launcher의 Notebook, Console, Terminal 아이콘을 클릭하여 사용할 수 있습니다.

생성된 Notebook 화면

notebook-console

생성된 Terminal 화면

terminal-console


노트북 서버 중단하기

노트북 서버를 오랜 시간 사용하지 않는 경우, 쿠버네티스 클러스터의 효율적인 리소스 사용을 위해서 노트북 서버를 중단(Stop)할 수 있습니다. 단, 이 경우 노트북 서버 생성 시 Workspace Volume 또는 Data Volume으로 지정해놓은 경로 외에 저장된 데이터는 모두 초기화되는 것에 주의하시기 바랍니다.
노트북 서버 생성 당시 경로를 변경하지 않았다면, 디폴트(Default) Workspace Volume의 경로는 노트북 서버 내의 /home/jovyan 이므로, /home/jovyan 의 하위 경로 이외의 경로에 저장된 데이터는 모두 사라집니다.

다음과 같이 STOP 버튼을 클릭하면 노트북 서버가 중단됩니다.

notebook-stop

중단이 완료되면 다음과 같이 CONNECT 버튼이 비활성화되며, PLAY 버튼을 클릭하면 다시 정상적으로 사용할 수 있습니다.

notebook-restart

버전: 1.0

3. Tensorboards

다음으로는 Central Dashboard의 왼쪽 탭의 Tensorboards를 클릭해보겠습니다.

left-tabs

다음과 같은 화면을 볼 수 있습니다.

tensorboard

Tensorboards 탭은 Tensorflow, PyTorch 등의 프레임워크에서 제공하는 Tensorboard 유틸이 생성한 ML 학습 관련 데이터를 시각화하는 텐서보드 서버(Tensorboard Server)를 쿠버네티스 클러스터에 생성하는 기능을 제공합니다.

이렇게 생성한 텐서보드 서버는, 일반적인 원격 텐서보드 서버의 사용법과 같이 사용할 수도 있으며, Kubeflow 파이프라인 런에서 바로 텐서보드 서버에 데이터를 저장하는 용도로 활용할 수 있습니다.

Kubeflow 파이프라인 런의 결과를 시각화하는 방법에는 다양한 방식이 있으며, 모두의 MLOps에서는 더 일반적으로 활용할 수 있도록 Kubeflow 컴포넌트의 Visualization 기능과 MLflow의 시각화 기능을 활용할 예정이므로, Tensorboards 페이지에 대한 자세한 설명은 생략하겠습니다.

버전: 1.0

4. Volumes

Volumes

다음으로는 Central Dashboard의 왼쪽 탭의 Volumes를 클릭해보겠습니다.

left-tabs

다음과 같은 화면을 볼 수 있습니다.

volumes

Volumes 탭은 Kubernetes의 볼륨(Volume), 정확히는 퍼시스턴트 볼륨 클레임(Persistent Volume Claim, 이하 pvc) 중 현재 user의 namespace에 속한 pvc를 관리하는 기능을 제공합니다.

위 스크린샷을 보면, 1. Notebooks 페이지에서 생성한 Volume의 정보를 확인할 수 있습니다. 해당 Volume의 Storage Class는 쿠버네티스 클러스터 설치 당시 설치한 Default Storage Class인 local-path로 설정되어있음을 확인할 수 있습니다.

이외에도 user namespace에 새로운 볼륨을 생성하거나, 조회하거나, 삭제하고 싶은 경우에 Volumes 페이지를 활용할 수 있습니다.


볼륨 생성하기

오른쪽 위의 + NEW VOLUME 버튼을 클릭하면 다음과 같은 화면을 볼 수 있습니다.

new-volume

name, size, storage class, access mode를 지정하여 생성할 수 있습니다.

원하는 리소스 스펙을 지정하여 생성하면 다음과 같이 볼륨의 Status가 Pending으로 조회됩니다. Status 아이콘에 마우스 커서를 가져다 대면 해당 볼륨은 mount하여 사용하는 first consumer가 나타날 때 실제로 생성을 진행한다(This volume will be bound when its first consumer is created.)는 메시지를 확인할 수 있습니다.
이는 실습을 진행하는 StorageClass인 local-path의 볼륨 생성 정책에 해당하며, 문제 상황이 아닙니다.
해당 페이지에서 Status가 Pending 으로 보이더라도 해당 볼륨을 사용하길 원하는 노트북 서버 혹은 파드(Pod)에서는 해당 볼륨의 이름을 지정하여 사용할 수 있으며, 그때 실제로 볼륨 생성이 진행됩니다.

creating-volume

docs/1.0/kubeflow/advanced-component

바로 입력과 출력에서 받는 argument 중 경로와 관련된 것들에서 _path 접미사가 모두 사라졌습니다.
iris_data.outputs["data_path"] 가 아닌 iris_data.outputs["data"] 으로 접근하는 것을 확인할 수 있습니다.
이는 kubeflow에서 정한 법칙으로 InputPath와 OutputPath 로 생성된 경로들은 파이프라인에서 접근할 때는 _path 접미사를 생략하여 접근합니다.

다만 방금 작성한 파이프라인을 업로드할 경우 실행이 되지 않습니다. 이유는 다음 페이지에서 설명합니다.

docs/1.0/kubeflow/advanced-environment

Kubeflow는 쿠버네티스를 이용하기 때문에 컴포넌트 래퍼는 각각 독립된 컨테이너 위에서 컴포넌트 콘텐츠를 실행합니다.

자세히 보면 생성된 train_from_csv.yaml 에서 정해진 이미지는 image: python:3.7 입니다.

이제 어떤 이유 때문에 실행이 안 되는지 눈치채신 분들도 있을 것입니다.

python:3.7 이미지에는 우리가 사용하고자 하는 dill, pandas, sklearn 이 설치되어 있지 않습니다.
그러므로 실행할 때 해당 패키지가 존재하지 않는다는 에러와 함께 실행이 안 됩니다.

그럼 어떻게 패키지를 추가할 수 있을까요?

패키지 추가 방법

Kubeflow 컴포넌트로 변환하는 과정에서 두 가지 방법을 통해 패키지를 추가할 수 있습니다.

  1. base_image 사용
  2. packages_to_install 사용

컴포넌트를 컴파일할 때 사용했던 함수 create_component_from_func 가 어떤 argument들을 받을 수 있는지 확인해 보겠습니다.

def create_component_from_func(
    func: Callable,
    output_component_file: Optional[str] = None,
    base_image: Optional[str] = None,
    packages_to_install: List[str] = None,
    annotations: Optional[Mapping[str, str]] = None,
):
  • func: 컴포넌트로 만들 컴포넌트 래퍼 함수
  • base_image: 컴포넌트 래퍼가 실행할 이미지
  • packages_to_install: 컴포넌트에서 사용해서 추가로 설치해야 하는 패키지

1. base_image

컴포넌트가 실행되는 순서를 좀 더 자세히 들여다보면 다음과 같습니다.

  1. docker pull base_image
  2. pip install packages_to_install
  3. run command

만약 컴포넌트가 사용하는 base_image에 패키지들이 전부 설치되어 있다면 추가적인 패키지 설치 없이 바로 사용할 수 있습니다.

예를 들어, 이번 페이지에서는 다음과 같은 Dockerfile을 작성하겠습니다.

FROM python:3.7

RUN pip install dill pandas scikit-learn

위의 Dockerfile을 이용해 이미지를 빌드해 보겠습니다. 실습에서 사용해 볼 컨테이너 레지스트리는 ghcr(GitHub Container Registry)입니다.
각자 환경에 맞는 컨테이너 레지스트리를 선택한 후 업로드하면 됩니다.

docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image
docker push ghcr.io/mlops-for-all/base-image

이제 base_image를 입력해 보겠습니다.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func

@partial(
create_component_from_func,
base_image="ghcr.io/mlops-for-all/base-image:latest",
)
def train_from_csv(
train_data_path: InputPath("csv"),
train_target_path: InputPath("csv"),
model_path: OutputPath("dill"),
kernel: str,
):
import dill
import pandas as pd

from sklearn.svm import SVC

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

if __name__ == "__main__":
train_from_csv.component_spec.save("train_from_csv.yaml")

이제 생성된 컴포넌트를 컴파일하면 다음과 같이 나옵니다.

name: Train from csv
inputs:
- {name: train_data, type: csv}
- {name: train_target, type: csv}
- {name: kernel, type: String}
outputs:
- {name: model, type: dill}
implementation:
container:
image: ghcr.io/mlops-for-all/base-image:latest
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def _make_parent_dirs_and_return_path(file_path: str):
import os
os.makedirs(os.path.dirname(file_path), exist_ok=True)
return file_path

def train_from_csv(
train_data_path,
train_target_path,
model_path,
kernel,
):
import dill
import pandas as pd

from sklearn.svm import SVC

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

import argparse
_parser = argparse.ArgumentParser(prog='Train from csv', description='')
_parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = train_from_csv(**_parsed_args)
args:
- --train-data
- {inputPath: train_data}
- --train-target
- {inputPath: train_target}
- --kernel
- {inputValue: kernel}
- --model
- {outputPath: model}

base_image가 우리가 설정한 값으로 바뀐 것을 확인할 수 있습니다.

2. packages_to_install

하지만 패키지가 추가될 때마다 docker 이미지를 계속해서 새로 생성하는 작업은 많은 시간이 소요됩니다. 이 때, packages_to_install argument 를 사용하면 패키지를 컨테이너에 쉽게 추가할 수 있습니다.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func

@partial(
create_component_from_func,
packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],
)
def train_from_csv(
train_data_path: InputPath("csv"),
train_target_path: InputPath("csv"),
model_path: OutputPath("dill"),
kernel: str,
):
import dill
import pandas as pd

from sklearn.svm import SVC

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

if __name__ == "__main__":
train_from_csv.component_spec.save("train_from_csv.yaml")

스크립트를 실행하면 다음과 같은 train_from_csv.yaml 파일이 생성됩니다.

name: Train from csv
inputs:
- {name: train_data, type: csv}
- {name: train_target, type: csv}
- {name: kernel, type: String}
outputs:
- {name: model, type: dill}
implementation:
container:
image: python:3.7
command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
'scikit-learn==1.0.1' --user) && "$0" "$@"
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def _make_parent_dirs_and_return_path(file_path: str):
import os
os.makedirs(os.path.dirname(file_path), exist_ok=True)
return file_path

def train_from_csv(
train_data_path,
train_target_path,
model_path,
kernel,
):
import dill
import pandas as pd

from sklearn.svm import SVC

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

import argparse
_parser = argparse.ArgumentParser(prog='Train from csv', description='')
_parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = train_from_csv(**_parsed_args)
args:
- --train-data
- {inputPath: train_data}
- --train-target
- {inputPath: train_target}
- --kernel
- {inputValue: kernel}
- --model
- {outputPath: model}

위에 작성한 컴포넌트가 실행되는 순서를 좀 더 자세히 들여다보면 다음과 같습니다.

  1. docker pull python:3.7
  2. pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1
  3. run command

생성된 yaml 파일을 자세히 보면, 다음과 같은 줄이 자동으로 추가되어 필요한 패키지가 설치되기 때문에 오류 없이 정상적으로 실행됩니다.

    command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
'scikit-learn==1.0.1' --user) && "$0" "$@"
docs/1.0/kubeflow/advanced-mlflow

이 때 업로드되는 MLflow의 endpoint를 우리가 설치한 mlflow service 로 이어지게 설정해주어야 합니다.
이 때 S3 Endpoint의 주소는 MLflow Server 설치 당시 설치한 minio의 쿠버네티스 서비스 DNS 네임을 활용합니다. 해당 service 는 kubeflow namespace에서 minio-service라는 이름으로 생성되었으므로, http://minio-service.kubeflow.svc:9000 로 설정합니다.
이와 비슷하게 tracking_uri의 주소는 mlflow server의 쿠버네티스 서비스 DNS 네임을 활용하여, http://mlflow-server-service.mlflow-system.svc:5000 로 설정합니다.

from functools import partial
from kfp.components import InputPath, create_component_from_func

@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
model_name: str,
model_path: InputPath("dill"),
input_example_path: InputPath("dill"),
signature_path: InputPath("dill"),
conda_env_path: InputPath("dill"),
):
import os
import dill
from mlflow.sklearn import save_model

from mlflow.tracking.client import MlflowClient

os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

with open(model_path, mode="rb") as file_reader:
clf = dill.load(file_reader)

with open(input_example_path, "rb") as file_reader:
input_example = dill.load(file_reader)

with open(signature_path, "rb") as file_reader:
signature = dill.load(file_reader)

with open(conda_env_path, "rb") as file_reader:
conda_env = dill.load(file_reader)

save_model(
sk_model=clf,
path=model_name,
serialization_format="cloudpickle",
conda_env=conda_env,
signature=signature,
input_example=input_example,
)
run = client.create_run(experiment_id="0")
client.log_artifact(run.info.run_id, model_name)

MLFlow Pipeline

이제 작성한 컴포넌트들을 연결해서 파이프라인으로 만들어 보겠습니다.

Data Component

모델을 학습할 때 쓸 데이터는 sklearn의 iris 입니다. 데이터를 생성하는 컴포넌트를 작성합니다.

from functools import partial

from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
create_component_from_func,
packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
data_path: OutputPath("csv"),
target_path: OutputPath("csv"),
):
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris()

data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
target = pd.DataFrame(iris["target"], columns=["target"])

data.to_csv(data_path, index=False)
target.to_csv(target_path, index=False)

Pipeline

파이프라인 코드는 다음과 같이 작성할 수 있습니다.

from kfp.dsl import pipeline


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
iris_data = load_iris_data()
model = train_from_csv(
train_data=iris_data.outputs["data"],
train_target=iris_data.outputs["target"],
kernel=kernel,
)
_ = upload_sklearn_model_to_mlflow(
model_name=model_name,
model=model.outputs["model"],
input_example=model.outputs["input_example"],
signature=model.outputs["signature"],
conda_env=model.outputs["conda_env"],
)

Run

위에서 작성된 컴포넌트와 파이프라인을 하나의 파이썬 파일에 정리하면 다음과 같습니다.

from functools import partial

import kfp
from kfp.components import InputPath, OutputPath, create_component_from_func
from kfp.dsl import pipeline


@partial(
create_component_from_func,
packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
data_path: OutputPath("csv"),
target_path: OutputPath("csv"),
):
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris()

data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
target = pd.DataFrame(iris["target"], columns=["target"])

data.to_csv(data_path, index=False)
target.to_csv(target_path, index=False)


@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
)
def train_from_csv(
train_data_path: InputPath("csv"),
train_target_path: InputPath("csv"),
model_path: OutputPath("dill"),
input_example_path: OutputPath("dill"),
signature_path: OutputPath("dill"),
conda_env_path: OutputPath("dill"),
kernel: str,
):
import dill
import pandas as pd
from sklearn.svm import SVC

from mlflow.models.signature import infer_signature
from mlflow.utils.environment import _mlflow_conda_env

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

input_example = train_data.sample(1)
with open(input_example_path, "wb") as file_writer:
dill.dump(input_example, file_writer)

signature = infer_signature(train_data, clf.predict(train_data))
with open(signature_path, "wb") as file_writer:
dill.dump(signature, file_writer)

conda_env = _mlflow_conda_env(
additional_pip_deps=["dill", "pandas", "scikit-learn"]
)
with open(conda_env_path, "wb") as file_writer:
dill.dump(conda_env, file_writer)


@partial(
create_component_from_func,
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
model_name: str,
model_path: InputPath("dill"),
input_example_path: InputPath("dill"),
signature_path: InputPath("dill"),
conda_env_path: InputPath("dill"),
):
import os
import dill
from mlflow.sklearn import save_model

from mlflow.tracking.client import MlflowClient

os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

with open(model_path, mode="rb") as file_reader:
clf = dill.load(file_reader)

with open(input_example_path, "rb") as file_reader:
input_example = dill.load(file_reader)

with open(signature_path, "rb") as file_reader:
signature = dill.load(file_reader)

with open(conda_env_path, "rb") as file_reader:
conda_env = dill.load(file_reader)

save_model(
sk_model=clf,
path=model_name,
serialization_format="cloudpickle",
conda_env=conda_env,
signature=signature,
input_example=input_example,
)
run = client.create_run(experiment_id="0")
client.log_artifact(run.info.run_id, model_name)


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
iris_data = load_iris_data()
model = train_from_csv(
train_data=iris_data.outputs["data"],
train_target=iris_data.outputs["target"],
kernel=kernel,
)
_ = upload_sklearn_model_to_mlflow(
model_name=model_name,
model=model.outputs["model"],
input_example=model.outputs["input_example"],
signature=model.outputs["signature"],
conda_env=model.outputs["conda_env"],
)


if __name__ == "__main__":
kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")
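
컴파일된 mlflow_pipeline.yaml은 대시보드에서 업로드해 실행할 수도 있고, 다음과 같이 kfp SDK로 직접 런을 생성할 수도 있습니다. 아래 host 주소와 파라미터 값은 예시로 가정한 것이며, 인증이 걸려 있는 멀티 유저 환경에서는 세션 쿠키 등 추가 설정이 필요할 수 있습니다.

import kfp

# 가정: 파이프라인 API 주소는 환경에 따라 다릅니다. (예: port-forward 후 http://localhost:8080/pipeline)
client = kfp.Client(host="http://localhost:8080/pipeline")

client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "sklearn-svc"},  # 예시 파라미터 값입니다.
)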

mlflow_pipeline.yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
generateName: mlflow-pipeline-
annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-19T14:14:11.999807',
pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "kernel", "type":
"String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}'}
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}
spec:
entrypoint: mlflow-pipeline
templates:
- name: load-iris-data
container:
args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]
command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
install --quiet --no-warn-script-location 'pandas' 'scikit-learn' --user)
&& "$0" "$@"
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def _make_parent_dirs_and_return_path(file_path: str):
import os
os.makedirs(os.path.dirname(file_path), exist_ok=True)
return file_path

def load_iris_data(
data_path,
target_path,
):
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris()

data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
target = pd.DataFrame(iris["target"], columns=["target"])

data.to_csv(data_path, index=False)
target.to_csv(target_path, index=False)

import argparse
_parser = argparse.ArgumentParser(prog='Load iris data', description='')
_parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = load_iris_data(**_parsed_args)
image: python:3.7
outputs:
artifacts:
- {name: load-iris-data-data, path: /tmp/outputs/data/data}
- {name: load-iris-data-target, path: /tmp/outputs/target/data}
metadata:
labels:
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
pipelines.kubeflow.org/pipeline-sdk-type: kfp
pipelines.kubeflow.org/enable_caching: "true"
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],
"command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
install --quiet --no-warn-script-location ''pandas'' ''scikit-learn'' ||
PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
''pandas'' ''scikit-learn'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
\"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
"def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
exist_ok=True)\n return file_path\n\ndef load_iris_data(\n data_path,\n target_path,\n):\n import
pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data
= pd.DataFrame(iris[\"data\"], columns=iris[\"feature_names\"])\n target
= pd.DataFrame(iris[\"target\"], columns=[\"target\"])\n\n data.to_csv(data_path,
index=False)\n target.to_csv(target_path, index=False)\n\nimport argparse\n_parser
= argparse.ArgumentParser(prog=''Load iris data'', description='''')\n_parser.add_argument(\"--data\",
dest=\"data_path\", type=_make_parent_dirs_and_return_path, required=True,
default=argparse.SUPPRESS)\n_parser.add_argument(\"--target\", dest=\"target_path\",
type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
= vars(_parser.parse_args())\n\n_outputs = load_iris_data(**_parsed_args)\n"],
"image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":
"data", "type": "csv"}, {"name": "target", "type": "csv"}]}', pipelines.kubeflow.org/component_ref: '{}'}
- name: mlflow-pipeline
inputs:
parameters:
- {name: kernel}
- {name: model_name}
dag:
tasks:
- {name: load-iris-data, template: load-iris-data}
- name: train-from-csv
template: train-from-csv
dependencies: [load-iris-data]
arguments:
parameters:
- {name: kernel, value: '{{inputs.parameters.kernel}}'}
artifacts:
- {name: load-iris-data-data, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}'}
- {name: load-iris-data-target, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}'}
- name: upload-sklearn-model-to-mlflow
template: upload-sklearn-model-to-mlflow
dependencies: [train-from-csv]
arguments:
parameters:
- {name: model_name, value: '{{inputs.parameters.model_name}}'}
artifacts:
- {name: train-from-csv-conda_env, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}'}
- {name: train-from-csv-input_example, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}'}
- {name: train-from-csv-model, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}'}
- {name: train-from-csv-signature, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}'}
- name: train-from-csv
container:
args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,
--kernel, '{{inputs.parameters.kernel}}', --model, /tmp/outputs/model/data,
--input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,
--conda-env, /tmp/outputs/conda_env/data]
command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'dill' 'pandas' 'scikit-learn' 'mlflow' || PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
'mlflow' --user) && "$0" "$@"
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def _make_parent_dirs_and_return_path(file_path: str):
import os
os.makedirs(os.path.dirname(file_path), exist_ok=True)
return file_path

def train_from_csv(
train_data_path,
train_target_path,
model_path,
input_example_path,
signature_path,
conda_env_path,
kernel,
):
import dill
import pandas as pd
from sklearn.svm import SVC

from mlflow.models.signature import infer_signature
from mlflow.utils.environment import _mlflow_conda_env

train_data = pd.read_csv(train_data_path)
train_target = pd.read_csv(train_target_path)

clf = SVC(kernel=kernel)
clf.fit(train_data, train_target)

with open(model_path, mode="wb") as file_writer:
dill.dump(clf, file_writer)

input_example = train_data.sample(1)
with open(input_example_path, "wb") as file_writer:
dill.dump(input_example, file_writer)

signature = infer_signature(train_data, clf.predict(train_data))
with open(signature_path, "wb") as file_writer:
dill.dump(signature, file_writer)

conda_env = _mlflow_conda_env(
additional_pip_deps=["dill", "pandas", "scikit-learn"]
)
with open(conda_env_path, "wb") as file_writer:
dill.dump(conda_env, file_writer)

import argparse
_parser = argparse.ArgumentParser(prog='Train from csv', description='')
_parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = train_from_csv(**_parsed_args)
image: python:3.7
inputs:
parameters:
- {name: kernel}
artifacts:
- {name: load-iris-data-data, path: /tmp/inputs/train_data/data}
- {name: load-iris-data-target, path: /tmp/inputs/train_target/data}
outputs:
artifacts:
- {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}
- {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}
- {name: train-from-csv-model, path: /tmp/outputs/model/data}
- {name: train-from-csv-signature, path: /tmp/outputs/signature/data}
metadata:
labels:
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
pipelines.kubeflow.org/pipeline-sdk-type: kfp
pipelines.kubeflow.org/enable_caching: "true"
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",
{"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",
{"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},
"--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":
"conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location ''dill'' ''pandas''
''scikit-learn'' ''mlflow'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
pip install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
''mlflow'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
\"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
"def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
exist_ok=True)\n return file_path\n\ndef train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n):\n import
dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from
mlflow.models.signature import infer_signature\n from mlflow.utils.environment
import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target
= pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data,
train_target)\n\n with open(model_path, mode=\"wb\") as file_writer:\n dill.dump(clf,
file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path,
\"wb\") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature
= infer_signature(train_data, clf.predict(train_data))\n with open(signature_path,
\"wb\") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env
= _mlflow_conda_env(\n additional_pip_deps=[\"dill\", \"pandas\",
\"scikit-learn\"]\n )\n with open(conda_env_path, \"wb\") as file_writer:\n dill.dump(conda_env,
file_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Train
from csv'', description='''')\n_parser.add_argument(\"--train-data\", dest=\"train_data_path\",
type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--train-target\",
dest=\"train_target_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--kernel\",
dest=\"kernel\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True,
default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\", dest=\"input_example_path\",
type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
dest=\"signature_path\", type=_make_parent_dirs_and_return_path, required=True,
default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\", dest=\"conda_env_path\",
type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
= vars(_parser.parse_args())\n\n_outputs = train_from_csv(**_parsed_args)\n"],
"image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},
{"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],
"name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},
{"name": "input_example", "type": "dill"}, {"name": "signature", "type":
"dill"}, {"name": "conda_env", "type": "dill"}]}', pipelines.kubeflow.org/component_ref: '{}',
pipelines.kubeflow.org/arguments.parameters: '{"kernel": "{{inputs.parameters.kernel}}"}'}
- name: upload-sklearn-model-to-mlflow
container:
args: [--model-name, '{{inputs.parameters.model_name}}', --model, /tmp/inputs/model/data,
--input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,
--conda-env, /tmp/inputs/conda_env/data]
command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'dill' 'pandas' 'scikit-learn' 'mlflow' 'boto3' || PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
'mlflow' 'boto3' --user) && "$0" "$@"
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def upload_sklearn_model_to_mlflow(
model_name,
model_path,
input_example_path,
signature_path,
conda_env_path,
):
import os
import dill
from mlflow.sklearn import save_model

from mlflow.tracking.client import MlflowClient

os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

with open(model_path, mode="rb") as file_reader:
clf = dill.load(file_reader)

with open(input_example_path, "rb") as file_reader:
input_example = dill.load(file_reader)

with open(signature_path, "rb") as file_reader:
signature = dill.load(file_reader)

with open(conda_env_path, "rb") as file_reader:
conda_env = dill.load(file_reader)

save_model(
sk_model=clf,
path=model_name,
serialization_format="cloudpickle",
conda_env=conda_env,
signature=signature,
input_example=input_example,
)
run = client.create_run(experiment_id="0")
client.log_artifact(run.info.run_id, model_name)

import argparse
_parser = argparse.ArgumentParser(prog='Upload sklearn model to mlflow', description='')
_parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)
image: python:3.7
inputs:
parameters:
- {name: model_name}
artifacts:
- {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}
- {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}
- {name: train-from-csv-model, path: /tmp/inputs/model/data}
- {name: train-from-csv-signature, path: /tmp/inputs/signature/data}
metadata:
labels:
pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
pipelines.kubeflow.org/pipeline-sdk-type: kfp
pipelines.kubeflow.org/enable_caching: "true"
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":
"model"}, "--input-example", {"inputPath": "input_example"}, "--signature",
{"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],
"command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
''mlflow'' ''boto3'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install
--quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn'' ''mlflow''
''boto3'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
\"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
"def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n):\n import
os\n import dill\n from mlflow.sklearn import save_model\n\n from
mlflow.tracking.client import MlflowClient\n\n os.environ[\"MLFLOW_S3_ENDPOINT_URL\"]
= \"http://minio-service.kubeflow.svc:9000\"\n os.environ[\"AWS_ACCESS_KEY_ID\"]
= \"minio\"\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"minio123\"\n\n client
= MlflowClient(\"http://mlflow-server-service.mlflow-system.svc:5000\")\n\n with
open(model_path, mode=\"rb\") as file_reader:\n clf = dill.load(file_reader)\n\n with
open(input_example_path, \"rb\") as file_reader:\n input_example
= dill.load(file_reader)\n\n with open(signature_path, \"rb\") as file_reader:\n signature
= dill.load(file_reader)\n\n with open(conda_env_path, \"rb\") as file_reader:\n conda_env
= dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format=\"cloudpickle\",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run
= client.create_run(experiment_id=\"0\")\n client.log_artifact(run.info.run_id,
model_name)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Upload
sklearn model to mlflow'', description='''')\n_parser.add_argument(\"--model-name\",
dest=\"model_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\",
dest=\"input_example_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
dest=\"signature_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\",
dest=\"conda_env_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args
= vars(_parser.parse_args())\n\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n"],
"image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},
{"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},
{"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],
"name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
arguments:
parameters:
- {name: kernel}
- {name: model_name}
serviceAccountName: pipeline-runner

After the script runs, upload the generated mlflow_pipeline.yaml as a pipeline, run it, and check the result of the run.

mlflow-svc-0
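Instead of uploading mlflow_pipeline.yaml through the dashboard, the compiled file can also be submitted with the KFP SDK. A minimal sketch, assuming the SDK can reach your Kubeflow Pipelines API (the host below is only a placeholder, and the argument values are examples):

import kfp

# Placeholder endpoint; point this at your own ml-pipeline service or port-forward.
client = kfp.Client(host="http://localhost:8080")

client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "sklearn-svc"},  # pipeline parameters defined above
    run_name="mlflow-pipeline-run",
)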

Port-forward the MLflow service to access the MLflow UI.

kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000; you can see that a run has been created, as shown below.

mlflow-svc-1

Click the run and you can see that it contains the trained model file.

mlflow-svc-2
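If you prefer to verify the run without the web UI, the tracking server can also be queried directly. A small sketch, assuming the port-forward above is active and that the model was logged to the default experiment "0" as in the component above:

from mlflow.tracking import MlflowClient

client = MlflowClient("http://localhost:5000")

# Latest run in experiment "0", where upload_sklearn_model_to_mlflow created its run.
run = client.search_runs(
    experiment_ids=["0"], order_by=["attributes.start_time DESC"], max_results=1
)[0]

for artifact in client.list_artifacts(run.info.run_id):
    print(artifact.path)  # should include a directory named after model_name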

If a model needs to be trained on a GPU but the component is not allocated one on Kubernetes, training will not run properly.
You can request a GPU for a task with the set_gpu_limit() method.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)


if __name__ == "__main__":
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

If you run the script above and look closely at sum-and-print-numbers in the generated file, you can see that {nvidia.com/gpu: 1} has been added under resources. This is how the component gets a GPU allocated.

  - name: sum-and-print-numbers
container:
args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
'{{inputs.parameters.print-and-return-number-2-Output}}']
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def sum_and_print_numbers(number_1, number_2):
print(number_1 + number_2)

import argparse
_parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
_parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = sum_and_print_numbers(**_parsed_args)
image: python:3.7
resources:
limits: {nvidia.com/gpu: 1}

CPU

The number of CPUs can be set with the .set_cpu_limit() method.
Unlike set_gpu_limit(), the value must be passed as a string rather than an int.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")


if __name__ == "__main__":
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below:

      resources:
limits: {nvidia.com/gpu: 1, cpu: '16'}

Memory

Memory can be set with the .set_memory_limit() method.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")


if __name__ == "__main__":
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below:

      resources:
limits: {nvidia.com/gpu: 1, memory: 1G}
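For reference, the three limits can also be chained on one task. A minimal sketch based on the example_pipeline above (the millicore CPU value is arbitrary, chosen only to show that any Kubernetes quantity string is accepted):

@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = (
        sum_and_print_numbers(
            number_1=number_1_result.output, number_2=number_2_result.output
        )
        .set_gpu_limit(1)        # rendered as nvidia.com/gpu: 1 under resources.limits
        .set_cpu_limit("500m")   # CPU takes a string; millicores such as "500m" also work
        .set_memory_limit("1G")
    )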
In that case, you can check them in main-logs under Output artifacts.

Visualizations

The Visualizations tab shows plots generated by a component.

To display a plot, save what you want to show through the mlpipeline_ui_metadata: OutputPath("UI_Metadata") argument. The plot must be provided in HTML format; the conversion process looks like this.


@partial(
create_component_from_func,
packages_to_install=["matplotlib"],
)
def plot_linear(
mlpipeline_ui_metadata: OutputPath("UI_Metadata")
):
import base64
import json
from io import BytesIO

import matplotlib.pyplot as plt

plt.plot([1, 2, 3], [1, 2, 3])

tmpfile = BytesIO()
plt.savefig(tmpfile, format="png")
encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

html = f"<img src='data:image/png;base64,{encoded}'>"
metadata = {
"outputs": [
{
"type": "web-app",
"storage": "inline",
"source": html,
},
],
}
with open(mlpipeline_ui_metadata, "w") as html_writer:
json.dump(metadata, html_writer)

Written as a pipeline, it looks like this:

from functools import partial

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@partial(
create_component_from_func,
packages_to_install=["matplotlib"],
)
def plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
import base64
import json
from io import BytesIO

import matplotlib.pyplot as plt

plt.plot([1, 2, 3], [1, 2, 3])

tmpfile = BytesIO()
plt.savefig(tmpfile, format="png")
encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

html = f"<img src='data:image/png;base64,{encoded}'>"
metadata = {
"outputs": [
{
"type": "web-app",
"storage": "inline",
"source": html,
},
],
}
with open(mlpipeline_ui_metadata, "w") as html_writer:
json.dump(metadata, html_writer)


@pipeline(name="plot_pipeline")
def plot_pipeline():
plot_linear()


if __name__ == "__main__":
kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")

Running this script produces plot_pipeline.yaml, shown below.

plot_pipeline.yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
generateName: plot-pipeline-
annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-17T13:31:32.963214',
pipelines.kubeflow.org/pipeline_spec: '{"name": "plot_pipeline"}'}
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}
spec:
entrypoint: plot-pipeline
templates:
- name: plot-linear
container:
args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]
command:
- sh
- -c
- (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
'matplotlib' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet
--no-warn-script-location 'matplotlib' --user) && "$0" "$@"
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def _make_parent_dirs_and_return_path(file_path: str):
import os
os.makedirs(os.path.dirname(file_path), exist_ok=True)
return file_path
def plot_linear(mlpipeline_ui_metadata):
import base64
import json
from io import BytesIO
import matplotlib.pyplot as plt
plt.plot([1, 2, 3], [1, 2, 3])
tmpfile = BytesIO()
plt.savefig(tmpfile, format="png")
encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")
html = f"<img src='data:image/png;base64,{encoded}'>"
metadata = {
"outputs": [
{
"type": "web-app",
"storage": "inline",
"source": html,
},
],
}
with open(mlpipeline_ui_metadata, "w") as html_writer:
json.dump(metadata, html_writer)

import argparse
_parser = argparse.ArgumentParser(prog='Plot linear', description='')
_parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())
_outputs = plot_linear(**_parsed_args)
image: python:3.7
outputs:
artifacts:
- {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}
metadata:
labels:
pipelines.kubeflow.org/kfp_sdk_version: 1.8.9
pipelines.kubeflow.org/pipeline-sdk-type: kfp
pipelines.kubeflow.org/enable_caching: "true"
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],
"command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
install --quiet --no-warn-script-location ''matplotlib'' || PIP_DISABLE_PIP_VERSION_CHECK=1
python3 -m pip install --quiet --no-warn-script-location ''matplotlib''
--user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
\"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
"def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
exist_ok=True)\n return file_path\n\ndef plot_linear(mlpipeline_ui_metadata):\n import
base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot
as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile,
format=\"png\")\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\"utf-8\")\n\n html
= f\"<img src=''data:image/png;base64,{encoded}''>\"\n metadata = {\n \"outputs\":
[\n {\n \"type\": \"web-app\",\n \"storage\":
\"inline\",\n \"source\": html,\n },\n ],\n }\n with
open(mlpipeline_ui_metadata, \"w\") as html_writer:\n json.dump(metadata,
html_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Plot
linear'', description='''')\n_parser.add_argument(\"--mlpipeline-ui-metadata\",
dest=\"mlpipeline_ui_metadata\", type=_make_parent_dirs_and_return_path,
required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs
= plot_linear(**_parsed_args)\n"], "image": "python:3.7"}}, "name": "Plot
linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}',
pipelines.kubeflow.org/component_ref: '{}'}
- name: plot-pipeline
dag:
tasks:
- {name: plot-linear, template: plot-linear}
arguments:
parameters: []
serviceAccountName: pipeline-runner

After the run, click Visualizations.

advanced-run-5.png
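The inline web-app viewer used above is not the only option; the KFP v1 UI metadata format also supports, for example, a markdown viewer, which avoids the base64 round trip when plain text is enough. A small sketch following the same UI_Metadata convention (the markdown content is arbitrary):

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def report_markdown(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import json

    # "markdown" is rendered directly by the Visualizations tab.
    metadata = {
        "outputs": [
            {
                "type": "markdown",
                "storage": "inline",
                "source": "# Linear plot\nThe plot component finished successfully.",
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)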

Run output

advanced-run-2.png

Run output collects and displays artifacts that follow the formats Kubeflow defines, most notably evaluation metrics.

To display a metric, write the name and value you want to show as JSON to the mlpipeline_metrics_path: OutputPath("Metrics") argument. For example, it can be written as follows.

@create_component_from_func
def show_metric_of_sum(
number: int,
mlpipeline_metrics_path: OutputPath("Metrics"),
):
import json
metrics = {
"metrics": [
{
"name": "sum_value",
"numberValue": number,
},
],
}
with open(mlpipeline_metrics_path, "w") as f:
json.dump(metrics, f)

Let's add the metric-generating component to the pipeline created earlier and run it. The full pipeline is as follows:

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number

@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int) -> int:
sum_number = number_1 + number_2
print(sum_number)
return sum_number

@create_component_from_func
def show_metric_of_sum(
number: int,
mlpipeline_metrics_path: OutputPath("Metrics"),
):
import json
metrics = {
"metrics": [
{
"name": "sum_value",
"numberValue": number,
},
],
}
with open(mlpipeline_metrics_path, "w") as f:
json.dump(metrics, f)

@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1)
number_2_result = print_and_return_number(number_2)
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
)
show_metric_of_sum(sum_result.output)


if __name__ == "__main__":
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

실행 후 Run Output을 클릭하면 다음과 같이 나옵니다.

advanced-run-4.png

Config

advanced-run-3.png

Config에서는 파이프라인 Config로 입력받은 모든 값을 확인할 수 있습니다.

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/basic-component/index.html b/docs/1.0/kubeflow/basic-component/index.html index 383b917b..0aaadb2d 100644 --- a/docs/1.0/kubeflow/basic-component/index.html +++ b/docs/1.0/kubeflow/basic-component/index.html @@ -7,7 +7,7 @@ - + @@ -18,8 +18,8 @@ 자세한 내용은 Kubeflow 공식 문서를 참고 하시길 바랍니다.
예를 들어서 입력받은 숫자를 2로 나눈 몫과 나머지를 반환하는 컴포넌트는 다음과 같이 작성해야 합니다.

from typing import NamedTuple


def divide_and_return_number(
number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
from collections import namedtuple

quotient, remainder = divmod(number, 2)
print("quotient is", quotient)
print("remainder is", remainder)

divide_outputs = namedtuple(
"DivideOutputs",
[
"quotient",
"remainder",
],
)
return divide_outputs(quotient, remainder)

Convert to Kubeflow Format

이제 작성한 컴포넌트를 kubeflow에서 사용할 수 있는 포맷으로 변환해야 합니다. 변환은 kfp.components.create_component_from_func 를 통해서 할 수 있습니다.
이렇게 변환된 형태는 파이썬에서 함수로 import 하여서 파이프라인에서 사용할 수 있습니다.

from kfp.components import create_component_from_func

@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number

Share component with yaml file

만약 파이썬 코드로 공유를 할 수 없는 경우 YAML 파일로 컴포넌트를 공유해서 사용할 수 있습니다. 이를 위해서는 우선 컴포넌트를 YAML 파일로 변환한 뒤 kfp.components.load_component_from_file 을 통해 파이프라인에서 사용할 수 있습니다.

우선 작성한 컴포넌트를 YAML 파일로 변환하는 과정에 대해서 설명합니다.

from kfp.components import create_component_from_func

@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number

if __name__ == "__main__":
print_and_return_number.component_spec.save("print_and_return_number.yaml")

작성한 파이썬 코드를 실행하면 print_and_return_number.yaml 파일이 생성됩니다. 파일을 확인하면 다음과 같습니다.

name: Print and return number
inputs:
- {name: number, type: Integer}
outputs:
- {name: Output, type: Integer}
implementation:
container:
image: python:3.7
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def print_and_return_number(number):
print(number)
return number

def _serialize_int(int_value: int) -> str:
if isinstance(int_value, str):
return int_value
if not isinstance(int_value, int):
raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
return str(int_value)

import argparse
_parser = argparse.ArgumentParser(prog='Print and return number', description='')
_parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
_parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])

_outputs = print_and_return_number(**_parsed_args)

_outputs = [_outputs]

_output_serializers = [
_serialize_int,

]

import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
args:
- --number
- {inputValue: number}
- '----output-paths'
- {outputPath: Output}

이제 생성된 파일을 공유해서 파이프라인에서 다음과 같이 사용할 수 있습니다.

from kfp.components import load_component_from_file

print_and_return_number = load_component_from_file("print_and_return_number.yaml")
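
불러온 컴포넌트는 파이썬 함수로 정의한 컴포넌트와 동일한 방식으로 파이프라인 안에서 사용할 수 있습니다. 예를 들면 다음과 같은 형태를 가정해 볼 수 있습니다(파일 경로와 파이프라인 이름은 설명을 위해 가정한 예시 값입니다).

import kfp
from kfp.components import load_component_from_file
from kfp.dsl import pipeline

# 공유받은 YAML 파일로부터 컴포넌트를 불러옵니다.
print_and_return_number = load_component_from_file("print_and_return_number.yaml")


@pipeline(name="example_pipeline_from_yaml")
def example_pipeline_from_yaml(number: int):
    # 불러온 컴포넌트도 파이썬 함수처럼 호출해서 사용합니다.
    print_and_return_number(number)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline_from_yaml, "example_pipeline_from_yaml.yaml")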

How Kubeflow executes component

Kubeflow에서 컴포넌트가 실행되는 순서는 다음과 같습니다.

  1. docker pull <image>: 정의된 컴포넌트의 실행 환경 정보가 담긴 이미지를 pull 합니다.
  2. run command: pull 한 이미지에서 컴포넌트 콘텐츠를 실행합니다.

print_and_return_number.yaml 를 예시로 들자면 @create_component_from_func 의 default image 는 python:3.7 이므로 해당 이미지를 기준으로 컴포넌트 콘텐츠를 실행하게 됩니다.

  1. docker pull python:3.7
  2. print(number)
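
만약 python:3.7 이 아닌 다른 이미지를 기준으로 실행하고 싶거나 추가 패키지가 필요하다면, create_component_from_func 의 base_image 와 packages_to_install 인자를 이용할 수 있습니다. 아래는 이를 가정한 간단한 스케치입니다(python:3.8 이미지와 pandas 패키지는 설명을 위한 예시 값입니다).

from functools import partial

from kfp.components import create_component_from_func


@partial(
    create_component_from_func,
    base_image="python:3.8",  # 컴포넌트 콘텐츠를 실행할 기준 이미지 (예시 값)
    packages_to_install=["pandas"],  # 실행 전에 pip 으로 설치할 패키지 (예시 값)
)
def print_and_return_number(number: int) -> int:
    print(number)
    return number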

References:

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/basic-pipeline-upload/index.html b/docs/1.0/kubeflow/basic-pipeline-upload/index.html index d1d3afe1..ba8fbb63 100644 --- a/docs/1.0/kubeflow/basic-pipeline-upload/index.html +++ b/docs/1.0/kubeflow/basic-pipeline-upload/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
버전: 1.0

6. Pipeline - Upload

Upload Pipeline

이제 우리가 만든 파이프라인을 직접 kubeflow에서 업로드 해 보겠습니다.
파이프라인 업로드는 kubeflow 대시보드 UI를 통해 진행할 수 있습니다. Install Kubeflow 에서 사용한 방법을 이용해 포트포워딩합니다.

kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

http://localhost:8080에 접속해 대시보드를 열어줍니다.

1. Pipelines 탭 선택

pipeline-gui-0.png

2. Upload Pipeline 선택

pipeline-gui-1.png

3. Choose file 선택

pipeline-gui-2.png

4. 생성된 yaml파일 업로드

pipeline-gui-3.png

5. Create

pipeline-gui-4.png

Upload Pipeline Version

업로드된 파이프라인은 추가 업로드를 통해 버전을 관리할 수 있습니다. 다만 깃헙과 같은 코드 차원의 버전 관리가 아니라 같은 이름의 파이프라인을 모아서 보여주는 역할을 합니다. 위의 예시에서 파이프라인을 업로드한 경우 다음과 같이 example_pipeline이 생성된 것을 확인할 수 있습니다.

pipeline-gui-5.png

클릭하면 다음과 같은 화면이 나옵니다.

pipeline-gui-4.png

Upload Version을 클릭하면 다음과 같이 파이프라인을 업로드할 수 있는 화면이 생성됩니다.

pipeline-gui-6.png

파이프라인을 업로드 합니다.

pipeline-gui-7.png

업로드된 경우 다음과 같이 파이프라인 버전을 확인할 수 있습니다.

pipeline-gui-8.png
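
참고로 UI 대신 kfp SDK 의 Client 를 이용해 파이프라인과 버전을 업로드할 수도 있습니다. 아래는 이를 가정한 간단한 스케치입니다(호스트 주소와 이름은 예시 값이며, 인증이 걸려 있는 환경에서는 추가 설정이 필요할 수 있습니다).

import kfp

# 포트포워딩된 Kubeflow Pipelines 주소를 가정합니다.
client = kfp.Client(host="http://localhost:8080/pipeline")

# 컴파일된 yaml 파일을 새 파이프라인으로 업로드합니다.
uploaded = client.upload_pipeline(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_name="example_pipeline",
)

# 같은 파이프라인에 새로운 버전을 업로드합니다.
client.upload_pipeline_version(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_version_name="v2",
    pipeline_id=uploaded.id,
)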

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/basic-pipeline/index.html b/docs/1.0/kubeflow/basic-pipeline/index.html index 4292657c..e8d29ec0 100644 --- a/docs/1.0/kubeflow/basic-pipeline/index.html +++ b/docs/1.0/kubeflow/basic-pipeline/index.html @@ -7,7 +7,7 @@ - + @@ -21,8 +21,8 @@ 만약, 여러 개의 반환 값이 있다면 outputs에 저장이 되며 dict 타입이기에 key를 이용해 원하는 반환 값을 사용할 수 있습니다. 예를 들어서 앞에서 작성한 여러 개를 반환하는 컴포넌트의 경우를 보겠습니다. divide_and_return_number 의 return 값은 quotient와 remainder 가 있습니다. 이 두 값을 print_and_return_number 에 전달하는 예시를 보면 다음과 같습니다.

def multi_pipeline(number: int):
    divided_result = divide_and_return_number(number)
    num_1_result = print_and_return_number(divided_result.outputs["quotient"])
    num_2_result = print_and_return_number(divided_result.outputs["remainder"])

divide_and_return_number의 결과를 divided_result에 저장하고 각각 divided_result.outputs["quotient"], divided_result.outputs["remainder"]로 값을 가져올 수 있습니다.

Write to python code

이제 다시 본론으로 돌아와서 이 두 값의 결과를 sum_and_print_numbers 에 전달합니다.

def example_pipeline():
number_1_result = print_and_return_number(number_1)
number_2_result = print_and_return_number(number_2)
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
)

다음으로 각 컴포넌트에 필요한 Config들을 모아서 파이프라인 Config로 정의합니다.

def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1)
number_2_result = print_and_return_number(number_2)
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
)

Convert to Kubeflow Format

마지막으로 kubeflow에서 사용할 수 있는 형식으로 변환합니다. 변환은 kfp.dsl.pipeline 함수를 이용해 할 수 있습니다.

from kfp.dsl import pipeline


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1)
number_2_result = print_and_return_number(number_2)
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
)

Kubeflow에서 파이프라인은 yaml 형식으로만 실행할 수 있기 때문에 생성한 파이프라인을 정해진 yaml 형식으로 컴파일(Compile) 해 주어야 합니다. 컴파일은 다음 코드를 이용해 할 수 있습니다.

if __name__ == "__main__":
import kfp
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Conclusion

앞서 설명한 내용을 한 파이썬 코드로 모으면 다음과 같이 됩니다.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline

@create_component_from_func
def print_and_return_number(number: int) -> int:
print(number)
return number

@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
print(number_1 + number_2)

@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
number_1_result = print_and_return_number(number_1)
number_2_result = print_and_return_number(number_2)
sum_result = sum_and_print_numbers(
number_1=number_1_result.output, number_2=number_2_result.output
)

if __name__ == "__main__":
kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

컴파일된 결과를 보면 다음과 같습니다.

example_pipeline.yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
generateName: example-pipeline-
annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
"Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
spec:
entrypoint: example-pipeline
templates:
- name: example-pipeline
inputs:
parameters:
- {name: number_1}
- {name: number_2}
dag:
tasks:
- name: print-and-return-number
template: print-and-return-number
arguments:
parameters:
- {name: number_1, value: '{{inputs.parameters.number_1}}'}
- name: print-and-return-number-2
template: print-and-return-number-2
arguments:
parameters:
- {name: number_2, value: '{{inputs.parameters.number_2}}'}
- name: sum-and-print-numbers
template: sum-and-print-numbers
dependencies: [print-and-return-number, print-and-return-number-2]
arguments:
parameters:
- {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
- {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
- name: print-and-return-number
container:
args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def print_and_return_number(number):
print(number)
return number

def _serialize_int(int_value: int) -> str:
if isinstance(int_value, str):
return int_value
if not isinstance(int_value, int):
raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
return str(int_value)

import argparse
_parser = argparse.ArgumentParser(prog='Print and return number', description='')
_parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
_parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])

_outputs = print_and_return_number(**_parsed_args)

_outputs = [_outputs]

_output_serializers = [
_serialize_int,

]

import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: python:3.7
inputs:
parameters:
- {name: number_1}
outputs:
parameters:
- name: print-and-return-number-Output
valueFrom: {path: /tmp/outputs/Output/data}
artifacts:
- {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
metadata:
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
"Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
\"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
print_and_return_number(number):\n print(number)\n return number\n\ndef
_serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
\"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
and return number'', description='''')\n_parser.add_argument(\"--number\",
dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
= _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
= [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
"image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
"name": "Print and return number", "outputs": [{"name": "Output", "type":
"Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
"{{inputs.parameters.number_1}}"}'}
- name: print-and-return-number-2
container:
args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def print_and_return_number(number):
print(number)
return number

def _serialize_int(int_value: int) -> str:
if isinstance(int_value, str):
return int_value
if not isinstance(int_value, int):
raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
return str(int_value)

import argparse
_parser = argparse.ArgumentParser(prog='Print and return number', description='')
_parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
_parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
_parsed_args = vars(_parser.parse_args())
_output_files = _parsed_args.pop("_output_paths", [])

_outputs = print_and_return_number(**_parsed_args)

_outputs = [_outputs]

_output_serializers = [
_serialize_int,

]

import os
for idx, output_file in enumerate(_output_files):
try:
os.makedirs(os.path.dirname(output_file))
except OSError:
pass
with open(output_file, 'w') as f:
f.write(_output_serializers[idx](_outputs[idx]))
image: python:3.7
inputs:
parameters:
- {name: number_2}
outputs:
parameters:
- name: print-and-return-number-2-Output
valueFrom: {path: /tmp/outputs/Output/data}
artifacts:
- {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
metadata:
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
"Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
\"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
print_and_return_number(number):\n print(number)\n return number\n\ndef
_serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
\"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
and return number'', description='''')\n_parser.add_argument(\"--number\",
dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
= _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
= [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
"image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
"name": "Print and return number", "outputs": [{"name": "Output", "type":
"Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
"{{inputs.parameters.number_2}}"}'}
- name: sum-and-print-numbers
container:
args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
'{{inputs.parameters.print-and-return-number-2-Output}}']
command:
- sh
- -ec
- |
program_path=$(mktemp)
printf "%s" "$0" > "$program_path"
python3 -u "$program_path" "$@"
- |
def sum_and_print_numbers(number_1, number_2):
print(number_1 + number_2)

import argparse
_parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
_parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
_parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
_parsed_args = vars(_parser.parse_args())

_outputs = sum_and_print_numbers(**_parsed_args)
image: python:3.7
inputs:
parameters:
- {name: print-and-return-number-2-Output}
- {name: print-and-return-number-Output}
metadata:
labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
{"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
"number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
\"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
= vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
"image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
{"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
"{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
arguments:
parameters:
- {name: number_1}
- {name: number_2}
serviceAccountName: pipeline-runner
+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/basic-requirements/index.html b/docs/1.0/kubeflow/basic-requirements/index.html index eaed1bbb..d8660380 100644 --- a/docs/1.0/kubeflow/basic-requirements/index.html +++ b/docs/1.0/kubeflow/basic-requirements/index.html @@ -7,13 +7,13 @@ - +
버전: 1.0

3. Install Requirements

실습을 위해 권장하는 파이썬 버전은 python>=3.7입니다. 파이썬 환경에 익숙하지 않은 분들은 다음 Appendix 1. 파이썬 가상환경을 참고하여 클라이언트 노드에 설치해주신 뒤 패키지 설치를 진행해주시기를 바랍니다.

실습을 진행하는 데 필요한 패키지들과 버전은 다음과 같습니다.

  • requirements.txt

    kfp==1.8.9
    scikit-learn==1.0.1
    mlflow==1.21.0
    pandas==1.3.4
    dill==0.3.4

앞에서 만든 파이썬 가상환경을 활성화합니다.

pyenv activate demo

패키지 설치를 진행합니다.

pip3 install -U pip
pip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4
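
설치가 끝난 뒤에는 파이썬에서 각 패키지의 버전을 출력해 정상적으로 설치되었는지 간단히 확인해 볼 수 있습니다.

import dill
import kfp
import mlflow
import pandas
import sklearn

# requirements.txt 에 적힌 버전과 일치하는지 확인합니다.
print("kfp:", kfp.__version__)
print("scikit-learn:", sklearn.__version__)
print("mlflow:", mlflow.__version__)
print("pandas:", pandas.__version__)
print("dill:", dill.__version__)
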
+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/basic-run/index.html b/docs/1.0/kubeflow/basic-run/index.html index 4649335a..c5cea373 100644 --- a/docs/1.0/kubeflow/basic-run/index.html +++ b/docs/1.0/kubeflow/basic-run/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
버전: 1.0

7. Pipeline - Run

Run Pipeline

이제 업로드한 파이프라인을 실행시켜 보겠습니다.

Before Run

1. Create Experiment

Experiment란 Kubeflow 에서 실행되는 Run을 논리적으로 관리하는 단위입니다.

Kubeflow에서 namespace에 처음 들어오면 생성되어 있는 Experiment가 없습니다. 따라서 파이프라인을 실행하기 전에 미리 Experiment를 생성해 두어야 합니다. 이미 Experiment가 있다면 Run Pipeline으로 넘어가도 무방합니다.

Experiment는 Create Experiment 버튼을 통해 생성할 수 있습니다.

run-0.png

2. Name 입력

Experiment로 사용할 이름을 입력합니다.

run-1.png

Run Pipeline

1. Create Run 선택

run-2.png

2. Experiment 선택

run-9.png

run-10.png

3. Pipeline Config 입력

파이프라인을 생성할 때 입력한 Config 값들을 채워 넣습니다. 업로드한 파이프라인은 number_1과 number_2를 입력해야 합니다.

run-3.png

4. Start

입력 후 Start 버튼을 누르면 파이프라인이 실행됩니다.

run-4.png

Run Result

실행된 파이프라인들은 Runs 탭에서 확인할 수 있습니다. Run을 클릭하면 실행된 파이프라인과 관련된 자세한 내용을 확인해 볼 수 있습니다.

run-5.png

클릭하면 다음과 같은 화면이 나옵니다. 아직 실행되지 않은 컴포넌트는 회색 표시로 나옵니다.

run-6.png

컴포넌트가 실행이 완료되면 초록색 체크 표시가 나옵니다.

run-7.png

가장 마지막 컴포넌트를 보면 입력한 Config인 3과 5의 합인 8이 출력된 것을 확인할 수 있습니다.

run-8.png
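
UI 대신 kfp SDK 로 Experiment 생성과 Run 실행을 해보고 싶다면, 다음과 같은 형태를 가정해 볼 수 있습니다(호스트 주소, Experiment 이름, 파라미터 값은 설명을 위한 예시입니다).

import kfp

# 포트포워딩된 Kubeflow Pipelines 주소를 가정합니다.
client = kfp.Client(host="http://localhost:8080/pipeline")

# Run 을 담을 Experiment 를 생성합니다.
client.create_experiment(name="demo-experiment")

# 컴파일된 yaml 과 파이프라인 Config(number_1, number_2)로 Run 을 생성합니다.
run = client.create_run_from_pipeline_package(
    pipeline_file="example_pipeline.yaml",
    arguments={"number_1": 3, "number_2": 5},
    run_name="example-run",
    experiment_name="demo-experiment",
)
print(run.run_id)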

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/how-to-debug/index.html b/docs/1.0/kubeflow/how-to-debug/index.html index 943d240e..5536b303 100644 --- a/docs/1.0/kubeflow/how-to-debug/index.html +++ b/docs/1.0/kubeflow/how-to-debug/index.html @@ -7,7 +7,7 @@ - + @@ -18,8 +18,8 @@ 우선 컴포넌트를 클릭하고 Input/Ouput 탭에서 입력값으로 들어간 데이터들을 다운로드 받습니다.
다운로드는 빨간색 네모로 표시된 곳의 링크를 클릭하면 됩니다.

debug-5.png

두 개의 파일을 같은 경로에 다운로드합니다.
그리고 해당 경로로 이동해서 파일을 확인합니다.

ls

다음과 같이 두 개의 파일이 있습니다.

drop-na-from-csv-output.tgz load-iris-data-target.tgz

압축을 풀어보겠습니다.

tar -xzvf load-iris-data-target.tgz ; mv data target.csv
tar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv

그리고 이를 주피터 노트북을 이용해 컴포넌트 코드를 실행합니다.

debug-3.png
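
노트북에서 수행하는 디버깅 코드는 예를 들어 다음과 같은 형태를 가정해 볼 수 있습니다(위에서 압축을 푼 data.csv, target.csv 파일이 같은 경로에 있다고 가정합니다).

import pandas as pd

# 컴포넌트 입력으로 들어갔던 파일을 그대로 읽어옵니다.
data = pd.read_csv("data.csv")
target = pd.read_csv("target.csv")
print(data.shape, target.shape)

# 컴포넌트와 동일하게 dropna 를 수행해 봅니다.
# 기본값(axis=0)은 row 기준이므로, 모든 row 에 결측치가 있는 경우 데이터가 전부 사라집니다.
print(data.dropna().shape)

# column 기준으로 drop 하면 결측치가 있는 column 만 제거되고 데이터는 유지됩니다.
print(data.dropna(axis="columns").shape)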

디버깅을 해본 결과 dropna 할 때 column을 기준으로 drop을 해야 하는데 row를 기준으로 drop을 해서 데이터가 모두 사라졌습니다. 이제 문제의 원인을 알아냈으니 column을 기준으로 drop이 되게 컴포넌트를 수정합니다.

@partial(
create_component_from_func,
packages_to_install=["pandas"],
)
def drop_na_from_csv(
data_path: InputPath("csv"),
output_path: OutputPath("csv"),
):
import pandas as pd

data = pd.read_csv(data_path)
data = data.dropna(axis="columns")
data.to_csv(output_path, index=False)

수정 후 파이프라인을 다시 업로드하고 실행하면 다음과 같이 정상적으로 수행하는 것을 확인할 수 있습니다.

debug-6.png

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/kubeflow-concepts/index.html b/docs/1.0/kubeflow/kubeflow-concepts/index.html index 661f96c8..15047a58 100644 --- a/docs/1.0/kubeflow/kubeflow-concepts/index.html +++ b/docs/1.0/kubeflow/kubeflow-concepts/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ 컴포넌트 래퍼가 콘텐츠를 감싸면 다음과 같이 됩니다.

concept-4.png

Artifacts

위의 설명에서 컴포넌트는 아티팩트(Artifacts)를 생성한다고 했습니다. 아티팩트란 evaluation result, log 등 어떤 형태로든 파일로 생성되는 것을 통틀어서 칭하는 용어입니다. 그중 우리가 관심을 두는 유의미한 것들은 다음과 같은 것들이 있습니다.

concept-5.png

  • Model
  • Data
  • Metric
  • etc

Model

저희는 모델을 다음과 같이 정의 했습니다.

모델이란 파이썬 코드와 학습된 Weights와 Network 구조 그리고 이를 실행시키기 위한 환경이 모두 포함된 형태

Data

데이터는 전 처리된 피처, 모델의 예측 값 등을 포함합니다.

Metric

Metric은 동적 지표와 정적 지표 두 가지로 나누었습니다.

  • 동적 지표란 train loss와 같이 학습이 진행되는 중 에폭(Epoch)마다 계속해서 변화하는 값을 의미합니다.
  • 정적 지표란 학습이 끝난 후 최종적으로 모델을 평가하는 정확도 등을 의미합니다.

Pipeline

파이프라인은 컴포넌트의 집합과 컴포넌트를 실행시키는 순서도로 구성되어 있습니다. 이 때, 순서도는 방향 순환이 없는 그래프(DAG, Directed Acyclic Graph)로 이루어져 있으며, 간단한 조건문을 포함할 수 있습니다.

concept-6.png
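
참고로, 여기서 말하는 간단한 조건문은 kfp 의 dsl.Condition 으로 표현해 볼 수 있습니다. 아래는 개념 설명을 위한 간단한 스케치입니다(컴포넌트 이름과 조건 값은 가정한 예시입니다).

from kfp import dsl
from kfp.components import create_component_from_func


@create_component_from_func
def flip_coin() -> str:
    import random

    return random.choice(["heads", "tails"])


@create_component_from_func
def print_message(message: str):
    print(message)


@dsl.pipeline(name="condition_example")
def condition_example():
    flip_task = flip_coin()
    # flip_coin 의 결과가 heads 인 경우에만 아래 컴포넌트가 실행됩니다.
    with dsl.Condition(flip_task.output == "heads"):
        print_message("heads 가 나왔습니다.")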

Pipeline Config

앞서 컴포넌트를 실행시키기 위해서는 Config가 필요하다고 설명했습니다. 파이프라인을 구성하는 컴포넌트의 Config 들을 모아 둔 것이 파이프라인 Config입니다.

concept-7.png

Run

파이프라인이 필요로 하는 파이프라인 Config가 주어져야지만 파이프라인을 실행할 수 있습니다.
Kubeflow에서는 실행된 파이프라인을 Run 이라고 부릅니다.

concept-8.png

파이프라인이 실행되면 각 컴포넌트가 아티팩트들을 생성합니다. Kubeflow pipeline에서는 Run 하나당 고유한 ID 를 생성하고, Run에서 생성되는 모든 아티팩트들을 저장합니다.

concept-9.png

그러면 이제 직접 컴포넌트와 파이프라인을 작성하는 방법에 대해서 알아보도록 하겠습니다.

+ \ No newline at end of file diff --git a/docs/1.0/kubeflow/kubeflow-intro/index.html b/docs/1.0/kubeflow/kubeflow-intro/index.html index e03eafb1..5bb47647 100644 --- a/docs/1.0/kubeflow/kubeflow-intro/index.html +++ b/docs/1.0/kubeflow/kubeflow-intro/index.html @@ -7,13 +7,13 @@ - +
버전: 1.0

1. Kubeflow Introduction

Kubeflow를 사용하기 위해서는 컴포넌트(Component)와 파이프라인(Pipeline)을 작성해야 합니다.

모두의 MLOps에서 설명하는 방식은 Kubeflow Pipeline 공식 홈페이지에서 설명하는 방식과는 다소 차이가 있습니다. 여기에서는 Kubeflow Pipeline을 워크플로(Workflow)가 아닌 앞서 설명한 MLOps를 구성하는 요소 중 하나의 컴포넌트로 사용하기 때문입니다.

그럼 이제 컴포넌트와 파이프라인은 무엇이며 어떻게 작성할 수 있는지 알아보도록 하겠습니다.

+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/advanced/index.html b/docs/1.0/prerequisites/docker/advanced/index.html index 75caa7da..38dd8676 100644 --- a/docs/1.0/prerequisites/docker/advanced/index.html +++ b/docs/1.0/prerequisites/docker/advanced/index.html @@ -7,7 +7,7 @@ - + @@ -33,8 +33,8 @@ 이렇게 docker container 내부에서 빠져나온 상황을 detached 라고 부릅니다. 도커에서는 run 을 실행함과 동시에 detached mode 로 실행시킬 수 있는 옵션을 제공합니다.

Third Practice

docker run -d ubuntu sleep 10

detached mode 이므로 해당 명령을 실행시킨 터미널에서 다른 액션을 수행시킬 수 있습니다.

상황에 따라 detached mode 를 적절히 활용하면 좋습니다.
예를 들어, DB 와 통신하는 Backend API server 를 개발할 때 Backend API server 는 source code 를 변경시켜가면서 hot-loading 으로 계속해서 로그를 확인해봐야 하지만, DB 는 로그를 지켜볼 필요는 없는 경우라면 다음과 같이 실행할 수 있습니다.
DB 는 docker container 를 detached mode 로 실행시키고, Backend API server 는 attached mode 로 log 를 following 하면서 실행시키면 효율적입니다.

References

+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/command/index.html b/docs/1.0/prerequisites/docker/command/index.html index d6754ea9..230e0c9e 100644 --- a/docs/1.0/prerequisites/docker/command/index.html +++ b/docs/1.0/prerequisites/docker/command/index.html @@ -7,7 +7,7 @@ - + @@ -18,8 +18,8 @@ 그런데 종료된 컨테이너는 왜 지워야 할까요?
종료되어 있는 컨테이너에는 이전에 사용한 데이터가 아직 내부에 남아있습니다. 그래서 restart 등을 통해서 컨테이너를 재시작할 수 있습니다. 그런데 이 과정에서 disk를 사용하게 됩니다.

그래서 완전히 사용하지 않는 컨테이너를 지우기 위해서는 docker rm 명령어를 사용해야 합니다.

우선 현재 컨테이너들을 확인합니다.

docker ps -a

다음과 같이 3개의 컨테이너가 있습니다.

CONTAINER ID   IMAGE          COMMAND                  CREATED          STATUS                            PORTS     NAMES
730391669c39 busybox "sh -c 'while true; …" 4 minutes ago Exited (137) About a minute ago demo3
fc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2
4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1

아래 명령어를 통해 demo3 컨테이너를 삭제해 보겠습니다.

docker rm demo3

docker ps -a 명령어를 치면 다음과 같이 2개로 줄었습니다.

CONTAINER ID   IMAGE          COMMAND        CREATED          STATUS                       PORTS     NAMES
fc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2
4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1

나머지 컨테이너들도 삭제합니다.

docker rm demo2
docker rm demo1

10. Docker rmi

도커 이미지를 삭제하는 커맨드입니다.

docker rmi --help

아래 명령어를 통해 현재 어떤 이미지들이 로컬에 있는지 확인합니다.

docker images

다음과 같이 출력됩니다.

REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
busybox latest a8440bba1bc0 32 hours ago 1.41MB
ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

busybox 이미지를 삭제해 보겠습니다.

docker rmi busybox

다시 docker images를 칠 경우 다음과 같이 나옵니다.

REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

References

+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/images/index.html b/docs/1.0/prerequisites/docker/images/index.html index a432f84e..f0fc29d7 100644 --- a/docs/1.0/prerequisites/docker/images/index.html +++ b/docs/1.0/prerequisites/docker/images/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ 비슷한 역할을 하는 명령어로 ENTRYPOINT 가 있습니다. 이 둘의 차이에 대해서는 뒤에서 다룹니다.
하나의 도커 이미지에서는 하나의 CMD 만 실행할 수 있다는 점에서 RUN 명령어와 다릅니다.

CMD <command>
CMD ["executable-command", "parameter1", "parameter2"]
CMD ["parameter1", "parameter2"] # ENTRYPOINT 와 함께 사용될 때

# 예시
CMD python main.py

WORKDIR

이후 추가될 명령어를 컨테이너 내의 어떤 디렉토리에서 수행할 것인지를 명시하는 명령어입니다.
만약, 해당 디렉토리가 없다면 생성합니다.

WORKDIR /path/to/workdir

# 예시
WORKDIR /home/demo
RUN pwd # /home/demo 가 출력됨

ENV

컨테이너 내부에서 지속적으로 사용될 environment variable 의 값을 설정하는 명령어입니다.

ENV <KEY> <VALUE>
ENV <KEY>=<VALUE>

# 예시
# default 언어 설정
RUN locale-gen ko_KR.UTF-8
ENV LANG ko_KR.UTF-8
ENV LANGUAGE ko_KR.UTF-8
ENV LC_ALL ko_KR.UTF-8

EXPOSE

컨테이너에서 뚫어줄 포트/프로토콜을 지정할 수 있습니다.
<protocol> 을 지정하지 않으면 TCP 가 디폴트로 설정됩니다.

EXPOSE <port>
EXPOSE <port>/<protocol>

# 예시
EXPOSE 8080

3. 간단한 Dockerfile 작성해보기

vim Dockerfile 혹은 vscode 등 본인이 사용하는 편집기로 Dockerfile 을 열어 다음과 같이 작성해줍니다.

# base image 를 ubuntu 18.04 로 설정합니다.
FROM ubuntu:18.04

# apt-get update 명령을 실행합니다.
RUN apt-get update

# TEST env var의 값을 hello 로 지정합니다.
ENV TEST hello

# DOCKER CONTAINER 가 시작될 때, 환경변수 TEST 의 값을 출력합니다.
CMD echo $TEST

4. Docker build from Dockerfile

docker build 명령어로 Dockerfile 로부터 Docker Image 를 만들어봅니다.

docker build --help

Dockerfile 이 있는 경로에서 다음 명령을 실행합니다.

docker build -t my-image:v1.0.0 .

위 커맨드를 설명하면 다음과 같습니다.

  • . : 현재 경로에 있는 Dockerfile 로부터
  • -t : my-image 라는 이름과 v1.0.0 이라는 태그로 이미지를
  • 빌드하겠다는 명령어

정상적으로 이미지 빌드되었는지 확인해 보겠습니다.

# grep : my-image 가 포함된 줄만 걸러내는(grep) 명령어
docker images | grep my-image

정상적으로 수행된다면 다음과 같이 출력됩니다.

my-image     v1.0.0    143114710b2d   3 seconds ago   87.9MB

5. Docker run from Dockerfile

그럼 이제 방금 빌드한 my-image:v1.0.0 이미지로 docker 컨테이너를 run 해보겠습니다.

docker run my-image:v1.0.0

정상적으로 수행된다면 다음과 같이 나옵니다.

hello

6. Docker run with env

이번에는 방금 빌드한 my-image:v1.0.0 이미지를 실행하는 시점에, TEST env var 의 값을 변경하여 docker 컨테이너를 run 해보겠습니다.

docker run -e TEST=bye my-image:v1.0.0

정상적으로 수행된다면 다음과 같이 나옵니다.

bye
+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/index.html b/docs/1.0/prerequisites/docker/index.html index 5c966969..a651eb5a 100644 --- a/docs/1.0/prerequisites/docker/index.html +++ b/docs/1.0/prerequisites/docker/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
버전: 1.0

What is Docker?

컨테이너

  • 컨테이너 가상화
    • 어플리케이션을 어디에서나 동일하게 실행하는 기술
  • 컨테이너 이미지
    • 어플리케이션을 실행시키기 위해 필요한 모든 파일들의 집합
    • → 붕어빵 틀
  • 컨테이너란?
    • 컨테이너 이미지를 기반으로 실행된 한 개의 프로세스
    • → 붕어빵 틀로 찍어낸 붕어빵

도커

도커는 컨테이너를 관리하고 사용할 수 있게 해주는 플랫폼입니다.
이러한 도커의 슬로건은 바로 Build Once, Run Anywhere 로 어디에서나 동일한 실행 결과를 보장합니다.

도커 내부에서 동작하는 과정을 보자면 실제로 container 를 위한 리소스를 분리하고, lifecycle 을 제어하는 기능은 linux kernel 의 cgroup 등이 수행합니다. 하지만 이러한 인터페이스를 바로 사용하는 것은 너무 어렵기 때문에 다음과 같은 추상화 layer를 만들게 됩니다.

docker-layer.png

이를 통해 사용자는 사용자 친화적인 API 인 Docker CLI 만으로 쉽게 컨테이너를 제어할 수 있습니다.

Layer 해석

위에서 나온 layer들의 역할은 다음과 같습니다.

  1. runC: linux kernel 의 기능을 직접 사용해서, container 라는 하나의 프로세스가 사용할 네임스페이스와 cpu, memory, filesystem 등을 격리시켜주는 기능을 수행합니다.
  2. containerd: runC(OCI layer) 에게 명령을 내리기 위한 추상화 단계이며, 표준화된 인터페이스(OCI)를 사용합니다.
  3. dockerd: containerd 에게 명령을 내리는 역할만 합니다.
  4. docker cli: 사용자는 docker cli 로 dockerd (Docker daemon)에게 명령을 내리기만 하면 됩니다.
    • 이 통신 과정에서 unix socket 을 사용하기 때문에 가끔 도커 관련 에러가 나면 /var/run/docker.sock 가 사용 중이다, 권한이 없다 등등의 에러 메시지가 나오는 것입니다.

이처럼 도커는 많은 단계를 감싸고 있지만, 흔히 도커라는 용어를 사용할 때는 Docker CLI 를 말할 때도 있고, Dockerd 를 말할 때도 있고 Docker Container 하나를 말할 때도 있어서 혼란이 생길 수 있습니다.
앞으로 나오는 글에서도 도커가 여러가지 의미로 쓰일 수 있습니다.

For ML Engineer

머신러닝 엔지니어가 도커를 사용하는 이유는 다음과 같습니다.

  1. 나의 ML 학습/추론 코드를 OS, python version, python 환경, 특정 python package 버전에 independent 하도록 해야 한다.
  2. 그래서 코드 뿐만이 아닌 해당 코드가 실행되기 위해 필요한 모든 종속적인 패키지, 환경 변수, 폴더명 등등을 하나의 패키지로 묶을 수 있는 기술이 컨테이너화 기술이다.
  3. 이 기술을 쉽게 사용하고 관리할 수 있는 소프트웨어 중 하나가 도커이며, 패키지를 도커 이미지라고 부른다.
+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/install/index.html b/docs/1.0/prerequisites/docker/install/index.html index b83cf0b2..d96a0ee0 100644 --- a/docs/1.0/prerequisites/docker/install/index.html +++ b/docs/1.0/prerequisites/docker/install/index.html @@ -7,15 +7,15 @@ - +
버전: 1.0

Install Docker

Docker

도커 실습을 위해 도커를 설치해야 합니다.
도커 설치는 어떤 OS를 사용하는지에 따라 달라집니다.
각 환경에 맞는 도커 설치는 공식 홈페이지를 참고해주세요.

설치 확인

docker run hello-world 가 정상적으로 수행되는 OS, 터미널 환경이 필요합니다.

OS        Docker Engine    Terminal
MacOS     Docker Desktop   zsh
Windows   Docker Desktop   Powershell
Windows   Docker Desktop   WSL2
Ubuntu    Docker Engine    bash

들어가기 앞서서..

MLOps를 사용하기 위해 필요한 도커 사용법을 설명하니 많은 비유와 예시가 MLOps 쪽으로 치중되어 있을 수 있습니다.

+ \ No newline at end of file diff --git a/docs/1.0/prerequisites/docker/introduction/index.html b/docs/1.0/prerequisites/docker/introduction/index.html index 51b352c4..ab11c2ac 100644 --- a/docs/1.0/prerequisites/docker/introduction/index.html +++ b/docs/1.0/prerequisites/docker/introduction/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ 따라서 도커 버전이 업데이트될 때마다 Docker Engine 의 인터페이스가 변경되어 쿠버네티스에서 크게 영향을 받는 일이 계속해서 발생하였습니다.

Open Container Initiative

그래서 이런 불편함을 해소하고자, 도커를 중심으로 구글 등 컨테이너 기술에 관심있는 여러 집단들이 한데 모여 Open Container Initiative, 이하 OCI라는 프로젝트를 시작하여 컨테이너에 관한 표준을 정하는 일들을 시작하였습니다.
도커에서도 인터페이스를 한 번 더 분리해서, OCI 표준을 준수하는 containerd라는 Container Runtime 를 개발하고, dockerd 가 containerd 의 API 를 호출하도록 추상화 레이어를 추가하였습니다.

이러한 흐름에 맞추어서 쿠버네티스에서도 이제부터는 도커만을 지원하지 않고, OCI 표준을 준수하고, 정해진 스펙을 지키는 컨테이너 런타임은 무엇이든 쿠버네티스에서 사용할 수 있도록, Container Runtime Interface, 이하 CRI 스펙을 버전 1.5부터 제공하기 시작했습니다.

CRI-O

Red Hat, Intel, SUSE, IBM에서 OCI 표준+CRI 스펙을 따라 Kubernetes 전용 Container Runtime 을 목적으로 개발한 컨테이너 런타임입니다.

지금의 도커 & 쿠버네티스

쿠버네티스는 Docker Engine 을 디폴트 컨테이너 런타임으로 사용해왔지만, 도커의 API 가 CRI 스펙에 맞지 않아(OCI 는 따름) 도커의 API를 CRI와 호환되게 바꿔주는 dockershim을 쿠버네티스 자체적으로 개발하고 지원해왔습니다. (도커 측이 아니라 쿠버네티스 측에서 지원했다는 점이 굉장히 큰 짐이었습니다.) 결국 dockershim은 쿠버네티스 v1.20 부터 Deprecated 되었고, v1.23 부터는 지원을 중단하기로 결정하였습니다.

  • v1.23 은 2021 년 12월 릴리즈

그래서 쿠버네티스 v1.23 부터는 도커를 native 하게 쓸 수 없습니다.
그렇지만 사용자들은 이런 변화에 크게 영향을 받지 않습니다. 왜냐하면 Docker Engine을 통해 만들어진 도커 이미지는 OCI 표준을 준수하므로, 쿠버네티스가 어떤 컨테이너 런타임으로 이루어져 있든 사용할 수 있기 때문입니다.

References

+ \ No newline at end of file diff --git a/docs/1.0/setup-components/install-components-kf/index.html b/docs/1.0/setup-components/install-components-kf/index.html index ac6bb906..9b6438aa 100644 --- a/docs/1.0/setup-components/install-components-kf/index.html +++ b/docs/1.0/setup-components/install-components-kf/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
버전: 1.0

1. Kubeflow

설치 파일 준비

Kubeflow v1.4.0 버전을 설치하기 위해서, 설치에 필요한 manifests 파일들을 준비합니다.

kubeflow/manifests Repositoryv1.4.0 태그로 깃 클론한 뒤, 해당 폴더로 이동합니다.

git clone -b v1.4.0 https://github.com/kubeflow/manifests.git
cd manifests

각 구성 요소별 설치

kubeflow/manifests Repository 에 각 구성 요소별 설치 커맨드가 적혀 있지만, 설치 중 발생할 수 있는 이슈나 정상적으로 설치되었는지 확인하는 방법은 적혀 있지 않아 처음 설치할 때 어려움을 겪는 경우가 많습니다.
따라서, 각 구성 요소별로 정상적으로 설치되었는지 확인하는 방법을 함께 작성합니다.

또한, 본 문서에서는 모두의 MLOps 에서 다루지 않는 구성요소인 Knative, KFServing, MPI Operator 의 설치는 리소스의 효율적 사용을 위해 따로 설치하지 않습니다.

Cert-manager

  1. cert-manager 를 설치합니다.

    kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -

    정상적으로 설치되면 다음과 같이 출력됩니다.

    namespace/cert-manager created
    customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
    customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
    customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
    customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
    customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
    customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
    serviceaccount/cert-manager created
    serviceaccount/cert-manager-cainjector created
    serviceaccount/cert-manager-webhook created
    role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
    role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
    role.rbac.authorization.k8s.io/cert-manager:leaderelection created
    clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
    clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
    clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
    clusterrole.rbac.authorization.k8s.io/cert-manager-view created
    clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
    rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
    rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
    rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
    clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
    service/cert-manager created
    service/cert-manager-webhook created
    deployment.apps/cert-manager created
    deployment.apps/cert-manager-cainjector created
    deployment.apps/cert-manager-webhook created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
    validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created

    Wait until all 3 pods in the cert-manager namespace are Running.

    kubectl get pod -n cert-manager

    Once they are all Running, you will see output similar to the following.

    NAME                                       READY   STATUS    RESTARTS   AGE
    cert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s
    cert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s
    cert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s
  2. Install kubeflow-issuer.

    kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -

    If the installation succeeds, the output looks like the following.

    clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created
  • cert-manager-webhook issue

    Note that if the cert-manager-webhook deployment is not yet Running, kubeflow-issuer may fail to install with an error similar to the one below.
    If this error occurs, confirm that all 3 cert-manager pods are Running and then run the command again (see the sketch after the error message).

    Error from server: error when retrieving current configuration of:
    Resource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"
    Name: "kubeflow-self-signing-issuer", Namespace: ""
    from server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused
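One way to script the retry described above: wait for the cert-manager pods to become Ready, then re-apply kubeflow-issuer. This is only a sketch and assumes you are still inside the manifests directory.

    # Wait for the cert-manager pods (including the webhook) to become Ready,
    # then re-apply the kubeflow-issuer manifests
    kubectl wait pod --all --for=condition=Ready -n cert-manager --timeout=300s
    kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -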

Istio

  1. Install the istio Custom Resource Definitions (CRDs).

    kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created
    customresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created
    customresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created
    customresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created
    customresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created
    customresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created
  2. Create the istio namespace.

    kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    namespace/istio-system created
  3. Install istio.

    kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    serviceaccount/istio-ingressgateway-service-account created
    serviceaccount/istio-reader-service-account created
    serviceaccount/istiod-service-account created
    role.rbac.authorization.k8s.io/istio-ingressgateway-sds created
    role.rbac.authorization.k8s.io/istiod-istio-system created
    clusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created
    clusterrole.rbac.authorization.k8s.io/istiod-istio-system created
    rolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created
    rolebinding.rbac.authorization.k8s.io/istiod-istio-system created
    clusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created
    clusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created
    configmap/istio created
    configmap/istio-sidecar-injector created
    service/istio-ingressgateway created
    service/istiod created
    deployment.apps/istio-ingressgateway created
    deployment.apps/istiod created
    envoyfilter.networking.istio.io/metadata-exchange-1.8 created
    envoyfilter.networking.istio.io/metadata-exchange-1.9 created
    envoyfilter.networking.istio.io/stats-filter-1.8 created
    envoyfilter.networking.istio.io/stats-filter-1.9 created
    envoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created
    envoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created
    envoyfilter.networking.istio.io/tcp-stats-filter-1.8 created
    envoyfilter.networking.istio.io/tcp-stats-filter-1.9 created
    envoyfilter.networking.istio.io/x-forwarded-host created
    gateway.networking.istio.io/istio-ingressgateway created
    authorizationpolicy.security.istio.io/global-deny-all created
    authorizationpolicy.security.istio.io/istio-ingressgateway created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created
    validatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created

    Wait until both pods in the istio-system namespace are Running.

    kubectl get po -n istio-system

    Once they are all Running, you will see output similar to the following.

    NAME                                   READY   STATUS    RESTARTS   AGE
    istio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s
    istiod-86457659bb-5h58w 1/1 Running 0 16s

Dex

Install dex.

kustomize build common/dex/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like the following.

namespace/auth created
customresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created
serviceaccount/dex created
clusterrole.rbac.authorization.k8s.io/dex created
clusterrolebinding.rbac.authorization.k8s.io/dex created
configmap/dex created
secret/dex-oidc-client created
service/dex created
deployment.apps/dex created
virtualservice.networking.istio.io/dex created

Wait until the single pod in the auth namespace is Running.

kubectl get po -n auth

Once it is Running, you will see output similar to the following.

NAME                   READY   STATUS    RESTARTS   AGE
dex-5ddf47d88d-458cs 1/1 Running 1 12s

OIDC AuthService

Install OIDC AuthService.

kustomize build common/oidc-authservice/base | kubectl apply -f -

If the command succeeds, the output looks like the following.

configmap/oidc-authservice-parameters created
secret/oidc-authservice-client created
service/authservice created
persistentvolumeclaim/authservice-pvc created
statefulset.apps/authservice created
envoyfilter.networking.istio.io/authn-filter created

Wait until the authservice-0 pod in the istio-system namespace is Running.

kubectl get po -n istio-system -w

Once everything is Running, you will see output similar to the following.

NAME                                   READY   STATUS    RESTARTS   AGE
authservice-0 1/1 Running 0 14s
istio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s
istiod-86457659bb-5h58w 1/1 Running 0 2m37s

Kubeflow Namespace

Create the kubeflow namespace.

kustomize build common/kubeflow-namespace/base | kubectl apply -f -

If the command succeeds, the output looks like the following.

namespace/kubeflow created

Check the kubeflow namespace.

kubectl get ns kubeflow

If it was created correctly, you will see output similar to the following.

NAME       STATUS   AGE
kubeflow Active 8s

Kubeflow Roles

Install kubeflow-roles.

kustomize build common/kubeflow-roles/base | kubectl apply -f -

If the command succeeds, the output looks like the following.

clusterrole.rbac.authorization.k8s.io/kubeflow-admin created
clusterrole.rbac.authorization.k8s.io/kubeflow-edit created
clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created
clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created
clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created
clusterrole.rbac.authorization.k8s.io/kubeflow-view created

List the kubeflow roles you just created.

kubectl get clusterrole | grep kubeflow

A total of 6 clusterroles are printed, as shown below.

kubeflow-admin                                                         2021-12-03T08:51:36Z
kubeflow-edit 2021-12-03T08:51:36Z
kubeflow-kubernetes-admin 2021-12-03T08:51:36Z
kubeflow-kubernetes-edit 2021-12-03T08:51:36Z
kubeflow-kubernetes-view 2021-12-03T08:51:36Z
kubeflow-view 2021-12-03T08:51:36Z

Kubeflow Istio Resources

Install kubeflow-istio-resources.

kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -

If the command succeeds, the output looks like the following.

clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created
clusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created
clusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created
gateway.networking.istio.io/kubeflow-gateway created

List the kubeflow-istio roles you just created.

kubectl get clusterrole | grep kubeflow-istio

A total of 3 clusterroles are printed, as shown below.

kubeflow-istio-admin                                                   2021-12-03T08:53:17Z
kubeflow-istio-edit 2021-12-03T08:53:17Z
kubeflow-istio-view 2021-12-03T08:53:17Z

Check that the gateway was created correctly in the kubeflow namespace.

kubectl get gateway -n kubeflow

If it was created correctly, you will see output similar to the following.

NAME               AGE
kubeflow-gateway 31s

Kubeflow Pipelines

Install Kubeflow Pipelines.

kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -

If the command succeeds, the output looks like the following.

customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created
customresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created
customresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created
...(output omitted)
authorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created
authorizationpolicy.security.istio.io/mysql created
authorizationpolicy.security.istio.io/service-cache-server created

The command above installs many resources at once, but some of them depend on the installation order of others.
Because of this, you may occasionally see an error similar to the following.

"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1""

If you see an error like this, wait about 10 seconds and run the command again (or use the retry sketch below).

kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -
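If you prefer not to re-run the command by hand, a simple retry loop like the one below also works. This is a sketch, not part of the official instructions; it simply re-applies the same kustomization until kubectl apply succeeds, which it will once the CRDs registered by earlier passes are available.

# Re-apply the pipelines manifests until every resource is accepted
until kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -; do
  echo "some resources were not ready yet, retrying in 10 seconds..."
  sleep 10
done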

Check that the installation succeeded.

kubectl get po -n kubeflow

Wait until all 16 pods are Running, as shown below.

NAME                                                     READY   STATUS    RESTARTS   AGE
cache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s
cache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s
kubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s
metacontroller-0 1/1 Running 0 5m3s
metadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s
metadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s
metadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s
minio-5b65df66c9-k5gzg 2/2 Running 0 5m3s
ml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s
ml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s
ml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s
ml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s
ml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s
ml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s
mysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s
workflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s

Additionally, check that the ml-pipeline UI is reachable.

kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80

Open a web browser and go to http://localhost:8888/#/pipelines/.

Confirm that a screen like the following appears.

pipeline-ui

  • localhost connection refused issue

localhost-reject

If you see an error saying that localhost refused the connection, as shown above, you can work around it by setting the bind address on the command line.

If it is not a security concern, bind to all addresses with 0.0.0.0 as shown below and check again that the ml-pipeline UI is reachable.

kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80
  • If the connection is still refused even with the option above

In your firewall settings, either allow access on all TCP ports or add a rule that allows access on port 8888 (see the sketch below for one possible example).

Open a web browser and go to http://<public IP address of your virtual instance>:8888/#/pipelines/; the ml-pipeline UI screen should appear.

When connecting to other ports later in this guide, run the corresponding port-forward command the same way and add that port number to your firewall rules.
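How you open a port depends on your environment (cloud security group, iptables, ufw, and so on). As one hedged example only, on an Ubuntu instance that uses ufw the rule could look like this; adjust it to your own firewall.

# Example only: allow inbound TCP traffic on port 8888 with ufw (Ubuntu)
sudo ufw allow 8888/tcp
sudo ufw status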

Katib

Install Katib.

kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -

If the command succeeds, the output looks like the following.

customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created
customresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created
customresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created
serviceaccount/katib-controller created
serviceaccount/katib-ui created
clusterrole.rbac.authorization.k8s.io/katib-controller created
clusterrole.rbac.authorization.k8s.io/katib-ui created
clusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created
clusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created
clusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created
clusterrolebinding.rbac.authorization.k8s.io/katib-controller created
clusterrolebinding.rbac.authorization.k8s.io/katib-ui created
configmap/katib-config created
configmap/trial-templates created
secret/katib-mysql-secrets created
service/katib-controller created
service/katib-db-manager created
service/katib-mysql created
service/katib-ui created
persistentvolumeclaim/katib-mysql created
deployment.apps/katib-controller created
deployment.apps/katib-db-manager created
deployment.apps/katib-mysql created
deployment.apps/katib-ui created
certificate.cert-manager.io/katib-webhook-cert created
issuer.cert-manager.io/katib-selfsigned-issuer created
virtualservice.networking.istio.io/katib-ui created
mutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created
validatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep katib

Wait until all 4 pods are Running, as shown below.

katib-controller-68c47fbf8b-b985z                        1/1     Running   0          82s
katib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s
katib-mysql-7894994f88-scs62 1/1 Running 0 82s
katib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s

Additionally, check that the Katib UI is reachable.

kubectl port-forward svc/katib-ui -n kubeflow 8081:80

Open a web browser and go to http://localhost:8081/katib/.

Confirm that a screen like the following appears.

katib-ui

Central Dashboard

Install the Central Dashboard.

kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like the following.

serviceaccount/centraldashboard created
role.rbac.authorization.k8s.io/centraldashboard created
clusterrole.rbac.authorization.k8s.io/centraldashboard created
rolebinding.rbac.authorization.k8s.io/centraldashboard created
clusterrolebinding.rbac.authorization.k8s.io/centraldashboard created
configmap/centraldashboard-config created
configmap/centraldashboard-parameters created
service/centraldashboard created
deployment.apps/centraldashboard created
virtualservice.networking.istio.io/centraldashboard created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep centraldashboard

Wait until the centraldashboard pod in the kubeflow namespace is Running.

centraldashboard-8fc7d8cc-xl7ts                          1/1     Running   0          52s

Additionally, check that the Central Dashboard UI is reachable.

kubectl port-forward svc/centraldashboard -n kubeflow 8082:80

Open a web browser and go to http://localhost:8082/.

Confirm that a screen like the following appears.

central-dashboard

Admission Webhook

Install the Admission Webhook.

kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -

If the command succeeds, the output looks like the following.

customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created
serviceaccount/admission-webhook-service-account created
clusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created
clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created
clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created
clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created
clusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created
service/admission-webhook-service created
deployment.apps/admission-webhook-deployment created
certificate.cert-manager.io/admission-webhook-cert created
issuer.cert-manager.io/admission-webhook-selfsigned-issuer created
mutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep admission-webhook

Wait until the pod is Running.

admission-webhook-deployment-667bd68d94-2hhrx            1/1     Running   0          11s

Notebooks & Jupyter Web App

  1. Install the Notebook controller.

    kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created
    serviceaccount/notebook-controller-service-account created
    role.rbac.authorization.k8s.io/notebook-controller-leader-election-role created
    clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created
    clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created
    clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created
    clusterrole.rbac.authorization.k8s.io/notebook-controller-role created
    rolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created
    configmap/notebook-controller-config-m44cmb547t created
    service/notebook-controller-service created
    deployment.apps/notebook-controller-deployment created

    Check that the installation succeeded.

    kubectl get po -n kubeflow | grep notebook-controller

    Wait until the pod is Running.

    notebook-controller-deployment-75b4f7b578-w4d4l          1/1     Running   0          105s
  2. Install the Jupyter Web App.

    kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    serviceaccount/jupyter-web-app-service-account created
    role.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created
    clusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created
    clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created
    clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created
    rolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created
    clusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created
    configmap/jupyter-web-app-config-76844k4cd7 created
    configmap/jupyter-web-app-logos created
    configmap/jupyter-web-app-parameters-chmg88cm48 created
    service/jupyter-web-app-service created
    deployment.apps/jupyter-web-app-deployment created
    virtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created

    Check that the installation succeeded.

    kubectl get po -n kubeflow | grep jupyter-web-app

    Wait until the pod is Running.

    jupyter-web-app-deployment-6f744fbc54-p27ts              1/1     Running   0          2m

Profiles + KFAM

Install the Profile Controller.

kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -

If the command succeeds, the output looks like the following.

customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created
serviceaccount/profiles-controller-service-account created
role.rbac.authorization.k8s.io/profiles-leader-election-role created
rolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created
clusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created
configmap/namespace-labels-data-48h7kd55mc created
configmap/profiles-config-46c7tgh6fd created
service/profiles-kfam created
deployment.apps/profiles-deployment created
virtualservice.networking.istio.io/profiles-kfam created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep profiles-deployment

Wait until the pod is Running.

profiles-deployment-89f7d88b-qsnrd                       2/2     Running   0          42s

Volumes Web App

Install the Volumes Web App.

kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like the following.

serviceaccount/volumes-web-app-service-account created
clusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created
clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created
clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created
clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created
clusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created
configmap/volumes-web-app-parameters-4gg8cm2gmk created
service/volumes-web-app-service created
deployment.apps/volumes-web-app-deployment created
virtualservice.networking.istio.io/volumes-web-app-volumes-web-app created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep volumes-web-app

Wait until the pod is Running.

volumes-web-app-deployment-8589d664cc-62svl              1/1     Running   0          27s

Tensorboard & Tensorboard Web App

  1. Install the Tensorboard Web App.

    kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    serviceaccount/tensorboards-web-app-service-account created
    clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created
    clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created
    clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created
    clusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created
    configmap/tensorboards-web-app-parameters-g28fbd6cch created
    service/tensorboards-web-app-service created
    deployment.apps/tensorboards-web-app-deployment created
    virtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created

    Check that the installation succeeded.

    kubectl get po -n kubeflow | grep tensorboards-web-app

    Wait until the pod is Running.

    tensorboards-web-app-deployment-6ff79b7f44-qbzmw            1/1     Running             0          22s
  2. Install the Tensorboard Controller.

    kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -

    If the command succeeds, the output looks like the following.

    customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created
    serviceaccount/tensorboard-controller created
    role.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created
    clusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created
    clusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created
    rolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created
    configmap/tensorboard-controller-config-bf88mm96c8 created
    service/tensorboard-controller-controller-manager-metrics-service created
    deployment.apps/tensorboard-controller-controller-manager created

    Check that the installation succeeded.

    kubectl get po -n kubeflow | grep tensorboard-controller

    Wait until the pod is Running.

    tensorboard-controller-controller-manager-954b7c544-vjpzj   3/3     Running   1          73s

Training Operator

Install the Training Operator.

kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -

If the command succeeds, the output looks like the following.

customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created
customresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created
customresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created
customresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created
serviceaccount/training-operator created
clusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created
clusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created
clusterrole.rbac.authorization.k8s.io/kubeflow-training-view created
clusterrole.rbac.authorization.k8s.io/training-operator created
clusterrolebinding.rbac.authorization.k8s.io/training-operator created
service/training-operator created
deployment.apps/training-operator created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep training-operator

Wait until the pod is Running.

training-operator-7d98f9dd88-6887f                          1/1     Running   0          28s

User Namespace

To use Kubeflow, create a Kubeflow Profile for the user account you will use.

kustomize build common/user-namespace/base | kubectl apply -f -

If the command succeeds, the output looks like the following.

configmap/default-install-config-9h2h2b6hbk created
profile.kubeflow.org/kubeflow-user-example-com created

Check that the kubeflow-user-example-com profile has been created.

kubectl get profile
kubeflow-user-example-com   37s
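If you later want to add more users, each one needs its own Profile. The manifest below is a minimal sketch of what such a Profile could look like, based on the kubeflow.org/v1 Profile resource; the namespace name and e-mail address are placeholders, and the corresponding user would also have to be added to the Dex configuration before they can log in.

# Hypothetical example: create a Profile (and namespace) for an additional user
kubectl apply -f - <<EOF
apiVersion: kubeflow.org/v1
kind: Profile
metadata:
  name: kubeflow-user2-example-com    # becomes the user's namespace
spec:
  owner:
    kind: User
    name: user2@example.com           # must match the identity Dex authenticates
EOF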

Verifying the Installation

Port-forward so you can reach the Kubeflow central dashboard from a web browser.

kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open a web browser, go to http://localhost:8080, and confirm that a screen like the following appears.

login-ui

Log in with the following credentials.

  • Email Address: user@example.com
  • Password: 12341234

central-dashboard

+ \ No newline at end of file diff --git a/docs/1.0/setup-components/install-components-mlflow/index.html b/docs/1.0/setup-components/install-components-mlflow/index.html index ddeb662e..884b00af 100644 --- a/docs/1.0/setup-components/install-components-mlflow/index.html +++ b/docs/1.0/setup-components/install-components-mlflow/index.html @@ -7,7 +7,7 @@ - + @@ -16,8 +16,8 @@ 그래서 MLflow에서 관리하는 데이터를 저장하고 UI를 제공하는 MLflow Tracking Server를 쿠버네티스 클러스터에 배포하여 사용할 예정입니다.

Before Install MLflow Tracking Server

Installing a PostgreSQL DB

Deploy a PostgreSQL DB to the Kubernetes cluster for the MLflow Tracking Server to use as its backend store.

First, create a namespace called mlflow-system.

kubectl create ns mlflow-system

If you see the following message, the namespace was created successfully.

namespace/mlflow-system created

Create the PostgreSQL DB in the mlflow-system namespace.

kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml 

If the command succeeds, the output looks like the following.

service/postgresql-mlflow-service created
deployment.apps/postgresql-mlflow created
persistentvolumeclaim/postgresql-mlflow-pvc created

Wait until the postgresql pod in the mlflow-system namespace is Running.

kubectl get pod -n mlflow-system | grep postgresql

If you see output similar to the following, it is running correctly.

postgresql-mlflow-7b9bc8c79f-srkh7   1/1     Running   0          38s

Configuring Minio

For the artifact store of the MLflow Tracking Server, we reuse the Minio instance installed in the earlier Kubeflow installation step.
However, to keep Kubeflow and MLflow data separate, we create a dedicated bucket for MLflow.
To create the bucket, first port-forward minio-service so you can reach Minio.

kubectl port-forward svc/minio-service -n kubeflow 9000:9000

Open a web browser and go to localhost:9000; you will see a screen like the following.

minio-install

Log in with the following credentials.

  • Username: minio
  • Password: minio123

Click the + button in the lower right, then click Create Bucket.

create-bucket

Enter mlflow as the bucket name to create the bucket.

If it is created successfully, a bucket named mlflow appears on the left, as shown below.

mlflow-bucket
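If you prefer the command line to the web console, the same bucket can be created with the MinIO client (mc) while the port-forward above is still running. This is an optional sketch, not required by the guide; the alias name is arbitrary.

# Optional: create the mlflow bucket with the MinIO client instead of the web UI
mc alias set kf-minio http://localhost:9000 minio minio123
mc mb kf-minio/mlflow
mc ls kf-minio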


Let's Install MLflow Tracking Server

Adding the Helm Repository

helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts

If you see the following message, the repository was added successfully.

"mlops-for-all" has been added to your repositories

Updating the Helm Repository

helm repo update

If you see the following message, the update succeeded.

Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "mlops-for-all" chart repository
Update Complete. ⎈Happy Helming!

Helm Install

Install version 0.2.0 of the mlflow-server Helm chart.

helm install mlflow-server mlops-for-all/mlflow-server \
--namespace mlflow-system \
--version 0.2.0
  • Note: the Helm chart above is installed with defaults that point MLflow's backend store and artifact store at the Minio instance created during the Kubeflow installation and the PostgreSQL DB created above.
    • If you want to use a separately provisioned DB or object storage instead, refer to the Helm Chart Repo and set the corresponding values when running helm install (see the sketch below).
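To see which values the chart exposes before overriding them, you can dump its default values with helm. The --set line below is only a placeholder showing the mechanism; the actual value keys are defined by the chart itself and should be taken from the output of helm show values (or provided via a -f values.yaml file).

# Inspect the chart's configurable values
helm show values mlops-for-all/mlflow-server --version 0.2.0

# Placeholder only: override values at install time with --set key=value
helm install mlflow-server mlops-for-all/mlflow-server \
--namespace mlflow-system \
--version 0.2.0 \
--set <key>=<value>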

You should see a message like the following.

NAME: mlflow-server
LAST DEPLOYED: Sat Dec 18 22:02:13 2021
NAMESPACE: mlflow-system
STATUS: deployed
REVISION: 1
TEST SUITE: None

Check that the installation succeeded.

kubectl get pod -n mlflow-system | grep mlflow-server

Wait until the mlflow-server pod in the mlflow-system namespace is Running.
If you see output similar to the following, it is running correctly.

mlflow-server-ffd66d858-6hm62        1/1     Running   0          74s

Verifying the Installation

Now let's check that the MLflow server is reachable.

First, port-forward so you can connect from the client node.

kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000; you will see a screen like the following.

mlflow-install
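Besides the browser check, you can also confirm that the tracking server answers over HTTP while the port-forward is running. This is an optional sketch that assumes an MLflow 1.x tracking server (which exposes a /health endpoint and the experiments/list REST API); adjust if your chart deploys a different version.

# Optional: probe the tracking server through the same port-forward
curl http://localhost:5000/health                          # should print OK
curl http://localhost:5000/api/2.0/mlflow/experiments/list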

+ \ No newline at end of file diff --git a/docs/1.0/setup-components/install-components-pg/index.html b/docs/1.0/setup-components/install-components-pg/index.html index 3cd17325..7f1c172e 100644 --- a/docs/1.0/setup-components/install-components-pg/index.html +++ b/docs/1.0/setup-components/install-components-pg/index.html @@ -7,15 +7,15 @@ - +
Version: 1.0

4. Prometheus & Grafana

Prometheus & Grafana

Prometheus and Grafana are monitoring tools.
To operate a service reliably, you need to continuously observe the state of the service and of the infrastructure it runs on, and respond quickly to problems based on the metrics you collect.
Among the many tools available for this kind of monitoring, MLOps for All uses the open-source Prometheus and Grafana.

For more details, see the official Prometheus documentation and the official Grafana documentation.

Prometheus collects metrics from a variety of targets, and Grafana helps visualize the collected data. They have no dependency on each other, but they complement each other well and are often used together.

On this page, we install Prometheus and Grafana in the Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core to check that metrics are collected correctly.

This guide uses version 1.12.0 of the seldonio/seldon-core-analytics Helm chart to install Prometheus and Grafana in the Kubernetes cluster, together with a dashboard for conveniently inspecting the metrics of SeldonDeployments created by Seldon-Core.

Adding the Helm Repository

helm repo add seldonio https://storage.googleapis.com/seldon-charts

If you see the following message, the repository was added successfully.

"seldonio" has been added to your repositories

Updating the Helm Repository

helm repo update

If you see the following message, the update succeeded.

Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "seldonio" chart repository
...Successfully got an update from the "datawire" chart repository
Update Complete. ⎈Happy Helming!

Helm Install

Install version 1.12.0 of the seldon-core-analytics Helm chart.

helm install seldon-core-analytics seldonio/seldon-core-analytics \
--namespace seldon-system \
--version 1.12.0

You should see a message like the following.

...(output omitted)
NAME: seldon-core-analytics
LAST DEPLOYED: Tue Dec 14 18:29:38 2021
NAMESPACE: seldon-system
STATUS: deployed
REVISION: 1

Check that the installation succeeded.

kubectl get pod -n seldon-system | grep seldon-core-analytics

Wait until all 6 seldon-core-analytics pods in the seldon-system namespace are Running.

seldon-core-analytics-grafana-657c956c88-ng8wn                  2/2     Running   0          114s
seldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s
seldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s
seldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s
seldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s
seldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s

Verifying the Installation

Now let's check that Grafana is reachable.

First, port-forward so you can connect from the client node.

kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

Open a web browser and go to localhost:8090; you will see a screen like the following.

grafana-install

Log in with the following credentials.

  • Email or username : admin
  • Password : password

After logging in, you will see a screen like the following.

grafana-login

Click the dashboards icon on the left, then click the Manage button.

dashboard-click

You can see that several default Grafana dashboards are included. Click the Prediction Analytics dashboard.

dashboard

The Seldon Core API Dashboard appears, and you can confirm output like the following.

seldon-dashboard
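The dashboard panels stay empty until a SeldonDeployment actually receives traffic. Once you have deployed a model (see the Seldon-Core page and the API Deployment part), requests sent through Ambassador follow the pattern sketched below; the namespace and deployment name are placeholders, and the service/port follow the Ambassador chart defaults used in this guide.

# Placeholder names: replace <namespace> and <deployment-name> with your SeldonDeployment
kubectl port-forward svc/ambassador -n seldon-system 8003:80
curl -X POST http://localhost:8003/seldon/<namespace>/<deployment-name>/api/v1.0/predictions \
  -H "Content-Type: application/json" \
  -d '{"data": {"ndarray": [[1.0, 2.0, 3.0, 4.0]]}}'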

References

+ \ No newline at end of file diff --git a/docs/1.0/setup-components/install-components-seldon/index.html b/docs/1.0/setup-components/install-components-seldon/index.html index eb288822..38f13342 100644 --- a/docs/1.0/setup-components/install-components-seldon/index.html +++ b/docs/1.0/setup-components/install-components-seldon/index.html @@ -7,15 +7,15 @@ - +
Version: 1.0

3. Seldon-Core

Seldon-Core

Seldon-Core is an open-source framework for deploying and managing large numbers of machine learning models on Kubernetes.
For more details, see Seldon-Core's official product page, its GitHub repository, and the API Deployment part of this guide.

Installing Seldon-Core

To use Seldon-Core, you need a module that handles Kubernetes ingress, such as Ambassador or Istio.
Seldon-Core officially supports only Ambassador and Istio; MLOps for All uses Seldon-Core with Ambassador, so we install Ambassador here.

Ambassador - Adding the Helm Repository

helm repo add datawire https://www.getambassador.io

If you see the following message, the repository was added successfully.

"datawire" has been added to your repositories

Ambassador - Updating the Helm Repository

helm repo update

If you see the following message, the update succeeded.

Hang tight while we grab the latest from your chart repositories...
...Successfully got an update from the "datawire" chart repository
Update Complete. ⎈Happy Helming!

Ambassador - Helm Install

Install version 6.9.3 of the ambassador chart.

helm install ambassador datawire/ambassador \
--namespace seldon-system \
--create-namespace \
--set image.repository=quay.io/datawire/ambassador \
--set enableAES=false \
--set crds.keep=false \
--version 6.9.3

You should see a message like the following.

...(output omitted)

W1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
W1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding
NAME: ambassador
LAST DEPLOYED: Mon Dec 6 17:01:34 2021
NAMESPACE: seldon-system
STATUS: deployed
REVISION: 1
NOTES:
-------------------------------------------------------------------------------
Congratulations! You've successfully installed Ambassador!

-------------------------------------------------------------------------------
To get the IP address of Ambassador, run the following commands:
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'

On GKE/Azure:
export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')

On AWS:
export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')

echo http://$SERVICE_IP:

For help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.

Wait until all 4 pods in the seldon-system namespace are Running.

kubectl get pod -n seldon-system
ambassador-7f596c8b57-4s9xh                  1/1     Running   0          7m15s
ambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s
ambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s
ambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s

Seldon-Core - Helm Install

Install version 1.11.2 of the seldon-core-operator chart.

helm install seldon-core seldon-core-operator \
--repo https://storage.googleapis.com/seldon-charts \
--namespace seldon-system \
--set usageMetrics.enabled=true \
--set ambassador.enabled=true \
--version 1.11.2

You should see a message like the following.

...(output omitted)

W1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration
NAME: seldon-core
LAST DEPLOYED: Mon Dec 6 17:05:34 2021
NAMESPACE: seldon-system
STATUS: deployed
REVISION: 1
TEST SUITE: None

Wait until the seldon-controller-manager pod in the seldon-system namespace is Running.

kubectl get pod -n seldon-system | grep seldon-controller
seldon-controller-manager-8457b8b5c7-r2frm   1/1     Running   0          2m22s
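With the controller running, the cluster can now accept SeldonDeployment resources. The manifest below is only a minimal sketch based on Seldon's public sklearn iris example (the namespace, names, and modelUri are taken from those examples and may need adjusting); actual model deployment is covered in the API Deployment part of this guide.

# Minimal sketch: deploy a pre-packaged sklearn iris model as a SeldonDeployment
kubectl create namespace seldon-test
kubectl apply -n seldon-test -f - <<EOF
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: iris-model
spec:
  name: iris
  predictors:
    - name: default
      replicas: 1
      graph:
        name: classifier
        implementation: SKLEARN_SERVER
        modelUri: gs://seldon-models/sklearn/iris
EOF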

References

+ \ No newline at end of file diff --git a/docs/1.0/setup-kubernetes/install-kubernetes-module/index.html b/docs/1.0/setup-kubernetes/install-kubernetes-module/index.html index eb80b2be..2163150c 100644 --- a/docs/1.0/setup-kubernetes/install-kubernetes-module/index.html +++ b/docs/1.0/setup-kubernetes/install-kubernetes-module/index.html @@ -7,14 +7,14 @@ - +
버전: 1.0

5. Install Kubernetes Modules

Setup Kubernetes Modules

이번 페이지에서는 클러스터에서 사용할 모듈을 클라이언트 노드에서 설치하는 과정에 관해서 설명합니다.
앞으로 소개되는 과정은 모두 클라이언트 노드에서 진행됩니다.

Helm

Helm은 쿠버네티스 패키지와 관련된 자원을 한 번에 배포하고 관리할 수 있게 도와주는 패키지 매니징 도구 중 하나입니다.

  1. 현재 폴더에 Helm v3.7.1 버전을 내려받습니다.
  • For Linux amd64

    wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz
  • 다른 OS는 공식 홈페이지를 참고하시어, 클라이언트 노드의 OS와 CPU에 맞는 바이너리의 다운 경로를 확인하시기 바랍니다.

  2. helm을 사용할 수 있도록 압축을 풀고, 파일의 위치를 변경합니다.

    tar -zxvf helm-v3.7.1-linux-amd64.tar.gz
    sudo mv linux-amd64/helm /usr/local/bin/helm
  3. 정상적으로 설치되었는지 확인합니다.

    helm help

    다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

    The Kubernetes package manager

    Common actions for Helm:
  • helm search: search for charts

  • helm pull: download a chart to your local directory to view

  • helm install: upload the chart to Kubernetes

  • helm list: list releases of charts

    Environment variables:

    Name                 Description
    $HELM_CACHE_HOME     set an alternative location for storing cached files.
    $HELM_CONFIG_HOME    set an alternative location for storing Helm configuration.
    $HELM_DATA_HOME      set an alternative location for storing Helm data.

    ...
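설치 확인을 더 간단히 하고 싶다면 아래처럼 버전 문자열만 출력해 볼 수도 있습니다. (출력되는 커밋 해시는 환경에 따라 다를 수 있습니다.)

helm version --short
# 예: v3.7.1+g1d11fcb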


Kustomize

kustomize 또한 여러 쿠버네티스 리소스를 한 번에 배포하고 관리할 수 있게 도와주는 패키지 매니징 도구 중 하나입니다.

  1. 현재 폴더에 kustomize v3.10.0 버전의 바이너리를 다운받습니다.
  • For Linux amd64

    wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz
  • 다른 OS는 kustomize/v3.10.0에서 확인 후 다운로드 받습니다.

  2. kustomize 를 사용할 수 있도록 압축을 풀고, 파일의 위치를 변경합니다.

    tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz
    sudo mv kustomize /usr/local/bin/kustomize
  3. 정상적으로 설치되었는지 확인합니다.

    kustomize help

    다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

    Manages declarative configuration of Kubernetes.
    See https://sigs.k8s.io/kustomize

    Usage:
    kustomize [command]

    Available Commands:
    build Print configuration per contents of kustomization.yaml
    cfg Commands for reading and writing configuration.
    completion Generate shell completion script
    create Create a new kustomization in the current directory
    edit Edits a kustomization file
    fn Commands for running functions against configuration.
    ...

CSI Plugin : Local Path Provisioner

  1. CSI Plugin은 kubernetes 내의 스토리지를 담당하는 모듈입니다. 단일 노드 클러스터에서 쉽게 사용할 수 있는 CSI Plugin인 Local Path Provisioner를 설치합니다.

    kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml

    다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

    namespace/local-path-storage created
    serviceaccount/local-path-provisioner-service-account created
    clusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created
    clusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created
    deployment.apps/local-path-provisioner created
    storageclass.storage.k8s.io/local-path created
    configmap/local-path-config created
  2. 또한, 다음과 같이 local-path-storage namespace 에 provisioner pod이 Running 인지 확인합니다.

    kubectl -n local-path-storage get pod

    정상적으로 수행되면 아래와 같이 출력됩니다.

    NAME                                     READY     STATUS    RESTARTS   AGE
    local-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m
  3. 다음을 수행하여 default storage class로 변경합니다.

    kubectl patch storageclass local-path  -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'

    정상적으로 수행되면 아래와 같이 출력됩니다.

    storageclass.storage.k8s.io/local-path patched
  4. default storage class로 설정되었는지 확인합니다.

    kubectl get sc

    다음과 같이 NAME에 local-path (default) 인 storage class가 존재하는 것을 확인합니다.

    NAME                   PROVISIONER             RECLAIMPOLICY   VOLUMEBINDINGMODE      ALLOWVOLUMEEXPANSION   AGE
    local-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h
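참고로, 동적 프로비저닝이 실제로 동작하는지 보고 싶다면 아래처럼 테스트용 PVC를 잠깐 만들어 볼 수 있습니다. (local-path-test-pvc 라는 이름은 임의로 정한 예시이며, 본문에는 없는 선택적 확인 절차입니다.)

cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: local-path-test-pvc
spec:
  accessModes:
  - ReadWriteOnce
  storageClassName: local-path
  resources:
    requests:
      storage: 1Gi
EOF

# local-path 는 WaitForFirstConsumer 이므로, 파드가 사용하기 전까지 Pending 상태인 것이 정상입니다.
kubectl get pvc local-path-test-pvc

# 확인이 끝나면 삭제합니다.
kubectl delete pvc local-path-test-pvc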
k3s 설치가 끝나면 아래와 같은 kubeconfig 가 생성됩니다. (보안 문제와 관련된 키들은 <...>로 가렸습니다.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://127.0.0.1:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>

2. 쿠버네티스 클러스터 셋업

k3s config를 클러스터의 kubeconfig로 사용하기 위해서 복사합니다.

mkdir .kube
sudo cp /etc/rancher/k3s/k3s.yaml .kube/config

복사된 config 파일에 user가 접근할 수 있는 권한을 줍니다.

sudo chown $USER:$USER .kube/config

3. 쿠버네티스 클라이언트 셋업

이제 클러스터에서 설정한 kubeconfig를 로컬로 이동합니다. 로컬에서는 경로를 ~/.kube/config로 설정합니다.

처음 복사한 config 파일에는 server ip가 https://127.0.0.1:6443 으로 되어 있습니다.
이 값을 클러스터의 ip에 맞게 수정합니다.
(이번 페이지에서 사용하는 클러스터의 ip에 맞춰서 https://192.168.0.19:6443 으로 수정했습니다.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://192.168.0.19:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>
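수작업으로 편집하는 대신, 아래처럼 scp 와 sed 로 복사와 server 주소 수정을 한 번에 처리할 수도 있습니다. ({CLUSTER_USER_ID}, 192.168.0.19 는 예시 값이며, GNU sed 기준이므로 macOS 에서는 sed -i '' 형태가 필요할 수 있습니다.)

# 클라이언트 노드에서 실행
mkdir -p ~/.kube
scp {CLUSTER_USER_ID}@192.168.0.19:~/.kube/config ~/.kube/config
sed -i 's#https://127.0.0.1:6443#https://192.168.0.19:6443#g' ~/.kube/config
kubectl get nodes    # 클러스터에 정상적으로 접근되는지 확인합니다.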

4. 쿠버네티스 기본 모듈 설치

Setup Kubernetes Modules을 참고하여 다음 컴포넌트들을 설치해 주시기 바랍니다.

  • helm
  • kustomize
  • CSI plugin
  • [Optional] nvidia-docker, nvidia-device-plugin

5. 정상 설치 확인

최종적으로 node가 Ready 인지, OS, Docker, Kubernetes 버전을 확인합니다.

kubectl get nodes -o wide

다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

NAME    STATUS   ROLES                  AGE   VERSION        INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
ubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 <none> Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11

6. References

클러스터 노드에 kubeadm, kubelet, kubectl 을 설치합니다. 실수로 이 컴포넌트들의 버전이 변경되면 예기치 않은 장애를 낳을 수 있으므로, 버전이 변경되지 않도록 고정(hold)합니다.

sudo apt-get update
sudo apt-get install -y apt-transport-https ca-certificates curl &&
sudo curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg &&
echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list &&
sudo apt-get update
sudo apt-get install -y kubelet=1.21.7-00 kubeadm=1.21.7-00 kubectl=1.21.7-00 &&
sudo apt-mark hold kubelet kubeadm kubectl

kubeadm, kubelet, kubectl 이 잘 설치되었는지 확인합니다.

mlops@ubuntu:~$ kubeadm version
kubeadm version: &version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:40:08Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
mlops@ubuntu:~$ kubelet --version
Kubernetes v1.21.7
mlops@ubuntu:~$ kubectl version --client
Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}

이제 kubeadm을 사용하여 쿠버네티스를 설치합니다.

kubeadm config images list
kubeadm config images pull

sudo kubeadm init --pod-network-cidr=10.244.0.0/16

kubectl을 통해서 쿠버네티스 클러스터를 제어할 수 있도록 admin 인증서를 $HOME/.kube/config 경로에 복사합니다.

mkdir -p $HOME/.kube
sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
sudo chown $(id -u):$(id -g) $HOME/.kube/config

CNI를 설치합니다. 쿠버네티스 내부의 네트워크 설정을 전담하는 CNI는 여러 종류가 있으며, 모두의 MLOps에서는 flannel을 사용합니다.

kubectl apply -f https://raw.githubusercontent.com/flannel-io/flannel/v0.13.0/Documentation/kube-flannel.yml
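CNI 가 정상적으로 배포되었는지는 아래처럼 확인해 볼 수 있습니다. (flannel v0.13 기준으로 kube-system namespace 에 DaemonSet 으로 배포되며, pod 이름의 형태는 버전에 따라 다를 수 있습니다.)

kubectl get pods -n kube-system | grep flannel
# kube-flannel-ds-xxxxx 형태의 pod 이 Running 이면 정상입니다.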

쿠버네티스 노드의 종류에는 크게 마스터 노드와 워커 노드가 있습니다. 안정성을 위하여 마스터 노드에는 쿠버네티스 클러스터를 제어하는 작업만 실행되도록 하는 것이 일반적이지만, 이 매뉴얼에서는 싱글 클러스터를 가정하고 있으므로 마스터 노드에 모든 종류의 작업이 실행될 수 있도록 설정합니다.

kubectl taint nodes --all node-role.kubernetes.io/master-

3. 쿠버네티스 클라이언트 셋업

클러스터에 생성된 kubeconfig 파일을 클라이언트에 복사하여 kubectl을 통해 클러스터를 제어할 수 있도록 합니다.

mkdir -p $HOME/.kube
scp -p {CLUSTER_USER_ID}@{CLUSTER_IP}:~/.kube/config ~/.kube/config

4. 쿠버네티스 기본 모듈 설치

Setup Kubernetes Modules을 참고하여 다음 컴포넌트들을 설치해 주시기 바랍니다.

  • helm
  • kustomize
  • CSI plugin
  • [Optional] nvidia-docker, nvidia-device-plugin

5. 정상 설치 확인

다음 명령어를 통해 노드의 STATUS가 Ready 상태가 되었는지 확인합니다.

kubectl get nodes

Ready 가 되면 다음과 비슷한 결과가 출력됩니다.

NAME     STATUS   ROLES                  AGE     VERSION
ubuntu Ready control-plane,master 2m55s v1.21.7

6. References

users:
- name: minikube
  user:
    client-certificate-data: LS0tLS1CRUdJTi....
    client-key-data: LS0tLS1CRUdJTiBSU0....

    1. 클라이언트 노드에서 .kube 폴더를 생성합니다.

      # 클라이언트 노드
      mkdir -p /home/$USER/.kube
    2. 해당 파일에 2. 에서 출력된 정보를 붙여넣은 뒤 저장합니다.

      vi /home/$USER/.kube/config

    4. 쿠버네티스 기본 모듈 설치

    Setup Kubernetes Modules을 참고하여 다음 컴포넌트들을 설치해 주시기 바랍니다.

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. 정상 설치 확인

    최종적으로 node가 Ready 인지, OS, Docker, Kubernetes 버전을 확인합니다.

    kubectl get nodes -o wide

    다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

    NAME     STATUS   ROLES                  AGE     VERSION   INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
    ubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 <none> Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11
    버전: 1.0

    3. Install Prerequisite

    이 페이지에서는 쿠버네티스를 설치하기에 앞서, 클러스터클라이언트에 설치 혹은 설정해두어야 하는 컴포넌트들에 대한 매뉴얼을 설명합니다.

    Install apt packages

    추후 클라이언트와 클러스터의 원활한 통신을 위해서는 Port-Forwarding을 수행해야 할 일이 있습니다. Port-Forwarding을 위해서는 클러스터에 다음 패키지를 설치해 주어야 합니다.

    sudo apt-get update
    sudo apt-get install -y socat

    Install Docker

    1. 도커 설치에 필요한 APT 패키지들을 설치합니다.

      sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release
    2. 도커의 공식 GPG key를 추가합니다.

      curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
    3. apt 패키지 매니저로 도커를 설치할 때, stable Repository에서 받아오도록 설정합니다.

      echo \
      "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
      $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
    4. 현재 설치할 수 있는 도커 버전을 확인합니다.

      sudo apt-get update && apt-cache madison docker-ce

      출력되는 버전 중 5:20.10.11~3-0~ubuntu-focal 버전이 있는지 확인합니다.

      apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal

      정상적으로 추가가 된 경우 다음과 같이 출력됩니다.

      docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages
    5. 5:20.10.11~3-0~ubuntu-focal 버전의 도커를 설치합니다.

      sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal
    6. 도커가 정상적으로 설치된 것을 확인합니다.

      sudo docker run hello-world

      명령어 실행 후 다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

      mlops@ubuntu:~$ sudo docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/
    7. docker 관련 command를 sudo 키워드 없이 사용할 수 있게 하도록 다음 명령어를 통해 권한을 추가합니다.

      sudo groupadd docker
      sudo usermod -aG docker $USER
      newgrp docker
    8. sudo 키워드 없이 docker command를 사용할 수 있게 된 것을 확인하기 위해, 다시 한번 docker run을 실행합니다.

      docker run hello-world

      명령어 실행 후 다음과 같은 메시지가 보이면 정상적으로 권한이 추가된 것을 의미합니다.

      mlops@ubuntu:~$ docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/

    Turn off Swap Memory

    kubelet 이 정상적으로 동작하게 하기 위해서는 클러스터 노드에서 swap이라고 불리는 가상메모리를 꺼 두어야 합니다. 다음 명령어를 통해 swap을 꺼 둡니다.
    (클러스터와 클라이언트를 같은 데스크톱에서 사용할 때 swap 메모리를 종료하면 속도의 저하가 있을 수 있습니다)

    sudo sed -i '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab
    sudo swapoff -a
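swap 이 실제로 비활성화되었는지는 아래 명령어로 확인할 수 있습니다.

sudo swapon --show       # 아무것도 출력되지 않으면 swap 이 꺼진 상태입니다.
free -h | grep -i swap   # Swap 라인의 total 이 0B 인지 확인합니다.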

    Install Kubectl

    kubectl 은 쿠버네티스 클러스터에 API를 요청할 때 사용하는 클라이언트 툴입니다. 클라이언트 노드에 설치해두어야 합니다.

    1. 현재 폴더에 kubectl v1.21.7 버전을 다운받습니다.

      curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl
    2. kubectl 을 사용할 수 있도록 파일의 권한과 위치를 변경합니다.

      sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
    3. 정상적으로 설치되었는지 확인합니다.

      kubectl version --client

      다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

      Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
    4. 여러 개의 쿠버네티스 클러스터를 사용하는 경우, 여러 개의 kubeconfig 파일을 관리해야 하는 경우가 있습니다.
여러 개의 kubeconfig 파일 혹은 여러 개의 kube-context를 효율적으로 관리하는 방법은 다음과 같은 문서를 참고하시기 바랍니다.
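예를 들어 아래와 같은 kubectl config 명령어로 등록된 context 를 확인하고 전환할 수 있으며, KUBECONFIG 환경 변수로 여러 kubeconfig 파일을 병합해 사용할 수도 있습니다. (other-config, my-context 는 설명을 위한 예시 이름입니다.)

kubectl config get-contexts             # 등록된 context 목록 확인
kubectl config current-context          # 현재 사용 중인 context 확인
kubectl config use-context my-context   # 다른 context 로 전환
export KUBECONFIG=~/.kube/config:~/.kube/other-config   # 여러 kubeconfig 병합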

    References

클러스터 는 우분투가 설치되어 있는 데스크톱 하나를 의미합니다.
    클라이언트 는 노트북 혹은 클러스터가 설치되어 있는 데스크톱 외의 클라이언트로 사용할 수 있는 다른 데스크톱을 사용하는 것을 권장합니다.
    하지만 두 대의 머신을 준비할 수 없다면 데스크톱 하나를 동시에 클러스터와 클라이언트 용도로 사용하셔도 괜찮습니다.

    클러스터

    1. Software

    아래는 클러스터에 설치해야 할 소프트웨어 목록입니다.

    Software           Version
    Ubuntu             20.04.3 LTS
    Docker (Server)    20.10.11
    NVIDIA-Driver      470.86
    Kubernetes         v1.21.7
    Kubeflow           v1.4.0
    MLFlow             v1.21.0

    2. Helm Chart

    아래는 Helm을 이용해 설치되어야 할 써드파티 소프트웨어 목록입니다.

    Helm Chart Repo Name             Version
    datawire/ambassador              6.9.3
    seldonio/seldon-core-operator    1.11.2

    클라이언트

    클라이언트는 MacOS (Intel CPU), Ubuntu 20.04 에서 검증되었습니다.

    Software     Version
    kubectl      v1.21.7
    helm         v3.7.1
    kustomize    v3.10.0

    Minimum System Requirements

    모두의 MLOps를 설치할 클러스터는 다음과 같은 사양을 만족시키는 것을 권장합니다.
이는 Kubernetes 및 Kubeflow 의 권장 사양에 의존합니다.

    • CPU : 6 core
    • RAM : 12GB
    • DISK : 50GB
    • GPU : NVIDIA GPU (Optional)
    버전: 1.0

    2. Setup Kubernetes

    Setup Kubernetes Cluster

    쿠버네티스를 처음 배우시는 분들에게 첫 진입 장벽은 쿠버네티스 실습 환경을 구축하는 것입니다.

    프로덕션 레벨의 쿠버네티스 클러스터를 구축할 수 있게 공식적으로 지원하는 도구는 kubeadm 이지만, 사용자들이 조금 더 쉽게 구축할 수 있도록 도와주는 kubespray, kops 등의 도구도 존재하며, 학습 목적을 위해서 컴팩트한 쿠버네티스 클러스터를 정말 쉽게 구축할 수 있도록 도와주는 k3s, minikube, microk8s, kind 등의 도구도 존재합니다.

    각각의 도구는 장단점이 다르기에 사용자마다 선호하는 도구가 다른 점을 고려하여, 본 글에서는 kubeadm, k3s, minikube의 3가지 도구를 활용하여 쿠버네티스 클러스터를 구축하는 방법을 다룹니다. 각 도구에 대한 자세한 비교는 다음 쿠버네티스 공식 문서를 확인해주시기를 바랍니다.

    모두의 MLOps에서 권장하는 툴은 k3s로 쿠버네티스 클러스터를 구축할 때 쉽게 할 수 있다는 장점이 있습니다.
    만약 쿠버네티스의 모든 기능을 사용하고 노드 구성까지 활용하고 싶다면 kubeadm을 권장해 드립니다.
minikube 는 저희가 설명하는 컴포넌트 외에도 다른 쿠버네티스 관련 컴포넌트를 add-on 형식으로 쉽게 설치할 수 있다는 장점이 있습니다.

모두의 MLOps에서는 앞으로 구축하게 될 MLOps 구성 요소들을 원활히 사용할 수 있도록, 각각의 도구로 쿠버네티스 클러스터를 구축할 때 추가로 설정해 주어야 하는 부분을 함께 안내합니다.

    Ubuntu OS까지는 설치되어 있는 데스크탑을 k8s cluster로 구축한 뒤, 외부 클라이언트 노드에서 쿠버네티스 클러스터에 접근하는 것을 확인하는 것까지가 본 Setup Kubernetes단원의 범위입니다.

    자세한 구축 방법은 3가지 도구마다 다르기에 다음과 같은 흐름으로 구성되어 있습니다.

    3. Setup Prerequisite
    4. Setup Kubernetes
    4.1. with k3s
    4.2. with minikube
    4.3. with kubeadm
    5. Setup Kubernetes Modules

    그럼 이제 각각의 도구를 활용해 쿠버네티스 클러스터를 구축해보겠습니다. 반드시 모든 도구를 사용해 볼 필요는 없으며, 이 중 여러분이 익숙하신 도구를 활용해주시면 충분합니다.

    버전: 1.0

    6. (Optional) Setup GPU

쿠버네티스 및 Kubeflow 등에서 GPU를 사용하기 위해서는 다음 작업이 필요합니다.

    1. Install NVIDIA Driver

    nvidia-smi 수행 시 다음과 같은 화면이 출력된다면 이 단계는 생략해 주시기 바랍니다.

    mlops@ubuntu:~$ nvidia-smi 
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    | 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |
    | 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |
    | 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |
    +-----------------------------------------------------------------------------+

    nvidia-smi의 출력 결과가 위와 같지 않다면 장착된 GPU에 맞는 nvidia driver를 설치해 주시기 바랍니다.

    만약 nvidia driver의 설치에 익숙하지 않다면 아래 명령어를 통해 설치하시기 바랍니다.

    sudo add-apt-repository ppa:graphics-drivers/ppa
    sudo apt update && sudo apt install -y ubuntu-drivers-common
    sudo ubuntu-drivers autoinstall
    sudo reboot

    2. NVIDIA-Docker 설치

    NVIDIA-Docker를 설치합니다.

    curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \
    sudo apt-key add -
    distribution=$(. /etc/os-release;echo $ID$VERSION_ID)
    curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
    sudo apt-get update
    sudo apt-get install -y nvidia-docker2 &&
    sudo systemctl restart docker

    정상적으로 설치되었는지 확인하기 위해, GPU를 사용하는 도커 컨테이너를 실행해봅니다.

    sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi

    다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

    mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    +-----------------------------------------------------------------------------+

    3. NVIDIA-Docker를 Default Container Runtime으로 설정

    쿠버네티스는 기본적으로 Docker-CE를 Default Container Runtime으로 사용합니다. 따라서, Docker Container 내에서 NVIDIA GPU를 사용하기 위해서는 NVIDIA-Docker 를 Container Runtime 으로 사용하여 pod를 생성할 수 있도록 Default Runtime을 수정해 주어야 합니다.

    1. /etc/docker/daemon.json 파일을 열어 다음과 같이 수정합니다.

      sudo vi /etc/docker/daemon.json

      {
        "default-runtime": "nvidia",
        "runtimes": {
          "nvidia": {
            "path": "nvidia-container-runtime",
            "runtimeArgs": []
          }
        }
      }
    2. 파일이 변경된 것을 확인한 후, Docker를 재시작합니다.

      sudo systemctl daemon-reload
      sudo service docker restart
    3. 변경 사항이 반영되었는지 확인합니다.

      sudo docker info | grep nvidia

      다음과 같은 메시지가 보이면 정상적으로 설치된 것을 의미합니다.

      mlops@ubuntu:~$ docker info | grep nvidia
      Runtimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc
      Default Runtime: nvidia

    4. Nvidia-Device-Plugin

    1. nvidia-device-plugin daemonset을 생성합니다.

      kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml
    2. nvidia-device-plugin pod이 RUNNING 상태로 생성되었는지 확인합니다.

      kubectl get pod -n kube-system | grep nvidia

      다음과 같은 결과가 출력되어야 합니다.

      kube-system       nvidia-device-plugin-daemonset-nlqh2         1/1     Running   0      1h
    3. node 정보에 gpu가 사용가능하도록 설정되었는지 확인합니다.

      kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\.com/gpu"

      다음과 같은 메시지가 보이면 정상적으로 설정된 것을 의미합니다.
      (모두의 MLOps 에서 실습을 진행한 클러스터는 2개의 GPU가 있어서 2가 출력됩니다. 본인의 클러스터의 GPU 개수와 맞는 숫자가 출력되면 됩니다.)

      NAME       GPU
      ubuntu 2

    설정되지 않은 경우, GPU의 value가 <None> 으로 표시됩니다.
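
클러스터가 GPU 요청을 실제로 처리할 수 있는지까지 보고 싶다면, 아래처럼 nvidia.com/gpu 리소스를 요청하는 테스트 파드를 잠깐 띄워 볼 수 있습니다. (gpu-test 라는 이름은 임의의 예시이고 이미지 태그도 본문에서 사용한 nvidia/cuda:11.0-base 를 그대로 가정한 것으로, 본문에는 없는 선택적 확인 절차입니다.)

cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: Pod
metadata:
  name: gpu-test
spec:
  restartPolicy: Never
  containers:
  - name: gpu-test
    image: nvidia/cuda:11.0-base
    command: ["nvidia-smi"]
    resources:
      limits:
        nvidia.com/gpu: 1
EOF

kubectl logs gpu-test    # 파드가 Completed 된 뒤 nvidia-smi 출력이 보이면 정상입니다.
kubectl delete pod gpu-test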

    버전: 1.0

    6. Multi Models

    Multi Models

    앞서 설명했던 방법들은 모두 단일 모델을 대상으로 했습니다.
    이번 페이지에서는 여러 개의 모델을 연결하는 방법에 대해서 알아봅니다.

    Pipeline

    우선 모델을 2개를 생성하는 파이프라인을 작성하겠습니다.

    모델은 앞서 사용한 SVC 모델에 StandardScaler를 추가하고 저장하도록 하겠습니다.

    from functools import partial

    import kfp
    from kfp.components import InputPath, OutputPath, create_component_from_func


    @partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
    )
    def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_scaler_from_csv(
    data_path: InputPath("csv"),
    scaled_data_path: OutputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    ):
    import dill
    import pandas as pd
    from sklearn.preprocessing import StandardScaler

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    data = pd.read_csv(data_path)

    scaler = StandardScaler()
    scaled_data = scaler.fit_transform(data)
    scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)

    scaled_data.to_csv(scaled_data_path, index=False)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(scaler, file_writer)

    input_example = data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(data, scaler.transform(data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_svc_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    kernel: str,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
    )
    def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)
    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)


    from kfp.dsl import pipeline


    @pipeline(name="multi_model_pipeline")
    def multi_model_pipeline(kernel: str = "rbf"):
    iris_data = load_iris_data()
    scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])
    _ = upload_sklearn_model_to_mlflow(
    model_name="scaler",
    model=scaled_data.outputs["model"],
    input_example=scaled_data.outputs["input_example"],
    signature=scaled_data.outputs["signature"],
    conda_env=scaled_data.outputs["conda_env"],
    )
    model = train_svc_from_csv(
    train_data=scaled_data.outputs["scaled_data"],
    train_target=iris_data.outputs["target"],
    kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
    model_name="svc",
    model=model.outputs["model"],
    input_example=model.outputs["input_example"],
    signature=model.outputs["signature"],
    conda_env=model.outputs["conda_env"],
    )


    if __name__ == "__main__":
    kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")

    파이프라인을 업로드하면 다음과 같이 나옵니다.

    children-kubeflow.png

    MLflow 대시보드를 확인하면 다음과 같이 두 개의 모델이 생성됩니다.

    children-mlflow.png

    각각의 run_id를 확인 후 다음과 같이 SeldonDeployment 스펙을 정의합니다.

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"

    모델이 두 개가 되었으므로 각 모델의 initContainer와 container를 정의해주어야 합니다. 이 필드는 입력값을 array로 받으며 순서는 관계없습니다.

    모델이 실행하는 순서는 graph에서 정의됩니다.

    graph:
      name: scaler
      type: MODEL
      parameters:
      - name: model_uri
        type: STRING
        value: "/mnt/models"
      - name: predict_method
        type: STRING
        value: "transform"
      children:
      - name: svc
        type: MODEL
        parameters:
        - name: model_uri
          type: STRING
          value: "/mnt/models"

    graph의 동작 방식은 처음 받은 값을 정해진 predict_method로 변환한 뒤 children으로 정의된 모델에 전달하는 방식입니다. 이 경우 scaler -> svc 로 데이터가 전달됩니다.

    이제 위의 스펙을 yaml파일로 생성해 보겠습니다.

    cat <<EOF > multi-model.yaml
    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    EOF

    다음 명령어를 통해 API를 생성합니다.

    kubectl apply -f multi-model.yaml

    정상적으로 수행되면 다음과 같이 출력됩니다.

    seldondeployment.machinelearning.seldon.io/multi-model-example created

    정상적으로 생성됐는지 확인합니다.

    kubectl get po -n kubeflow-user-example-com | grep multi-model-example

    정상적으로 생성되면 다음과 비슷한 pod이 생성됩니다.

    multi-model-example-model-0-scaler-svc-9955fb795-n9ffw   4/4     Running     0          2m30s
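    배포가 완료되면, Seldon Core 가 만드는 URL 규칙(http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/predictions)에 따라 아래처럼 요청을 보내 scaler -> svc 그래프 전체가 동작하는지 확인해 볼 수 있습니다. (NODE_IP, NODE_PORT 는 Ambassador ingress 의 값이며, 입력 데이터는 임의의 예시입니다.)

    export NODE_IP=$(kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }')
    export NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/multi-model-example/api/v1.0/predictions \
      -H 'Content-Type: application/json' \
      -d '{ "data": { "names": ["sepal length (cm)", "sepal width (cm)", "petal length (cm)", "petal width (cm)"], "ndarray": [[5.1, 3.5, 1.4, 0.2]] } }'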
    이미지에는 모델이 로드될 때 필요한 패키지들이 모두 설치되어 있어야 합니다.

    Seldon Core에서 지원하는 공식 이미지는 다음과 같습니다.

    • seldonio/sklearnserver
    • seldonio/mlflowserver
    • seldonio/xgboostserver
    • seldonio/tfserving

    volumeMounts

    volumeMounts:
    - mountPath: /mnt/models
      name: model-provision-location
      readOnly: true

    initContainer에서 다운로드받은 데이터가 있는 경로를 알려주는 필드입니다.
    이때 모델이 수정되는 것을 방지하기 위해 readOnly: true도 같이 주겠습니다.

    securityContext

    securityContext:
      privileged: true
      runAsUser: 0
      runAsGroup: 0

    필요한 패키지를 설치할 때 pod이 권한이 없어서 패키지 설치를 수행하지 못할 수 있습니다.
    이를 위해서 root 권한을 부여합니다. (다만 이 작업은 실제 서빙 시 보안 문제가 생길 수 있습니다.)

    graph

    graph:
      name: model
      type: MODEL
      parameters:
      - name: model_uri
        type: STRING
        value: "/mnt/models"
      children: []

    모델이 동작하는 순서를 정의한 필드입니다.

    name

    모델 그래프의 이름입니다. container에서 정의된 이름을 사용합니다.

    type

    type은 크게 4가지가 있습니다.

    1. TRANSFORMER
    2. MODEL
    3. OUTPUT_TRANSFORMER
    4. ROUTER

    각 type에 대한 자세한 설명은 Seldon Core Complex Graphs Metadata Example을 참조 바랍니다.

    parameters

    class init 에서 사용되는 값들입니다.
    sklearnserver에서 필요한 값은 다음 파일에서 확인할 수 있습니다.

    class SKLearnServer(SeldonComponent):
        def __init__(self, model_uri: str = None, method: str = "predict_proba"):

    코드를 보면 model_urimethod를 정의할 수 있습니다.

    children

    순서도를 작성할 때 사용됩니다. 자세한 내용은 다음 페이지에서 설명합니다.

    배포된 API는 다음과 같은 규칙으로 생성됩니다.
    http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/

    NODE_IP / NODE_PORT

    Seldon Core 설치 시, Ambassador를 Ingress Controller로 설정하였으므로, SeldonDeployment로 생성된 API 서버는 모두 Ambassador의 Ingress gateway를 통해 요청할 수 있습니다.

    따라서 우선 Ambassador Ingress Gateway의 url을 환경 변수로 설정합니다.

    export NODE_IP=$(kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }')
    export NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")

    설정된 url을 확인합니다.

    echo "NODE_IP"=$NODE_IP
    echo "NODE_PORT"=$NODE_PORT

    다음과 비슷하게 출력되어야 하며, 클라우드 등을 통해 설정할 경우, internal ip 주소가 설정되는 것을 확인할 수 있습니다.

    NODE_IP=192.168.0.19
    NODE_PORT=30486

    namespace / seldon-deployment-name

    SeldonDeployment가 배포된 namespaceseldon-deployment-name를 의미합니다. 이는 스펙을 정의할 때 metadata에 정의된 값을 사용합니다.

    metadata:
      name: sklearn
      namespace: seldon-deploy

    위의 예시에서는 namespace는 seldon-deploy, seldon-deployment-name은 sklearn 입니다.
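    정리하면, 이 예시의 endpoint 는 아래처럼 환경 변수로 조합해 둘 수 있습니다.

    export SELDON_URL=http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0
    echo $SELDON_URL
    # 예: http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0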

    method-name

    SeldonDeployment에서 주로 사용하는 method-name은 두 가지가 있습니다.

    1. doc
    2. predictions

    각각의 method의 자세한 사용 방법은 아래에서 설명합니다.

    Using Swagger

    우선 doc method를 사용하는 방법입니다. doc method를 이용하면 seldon에서 생성한 swagger에 접속할 수 있습니다.

    1. Swagger 접속

    위에서 설명한 ingress url 규칙에 따라 아래 주소를 통해 swagger에 접근할 수 있습니다.
    http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/

    iris-swagger1.png

    2. Swagger Predictions 메뉴 선택

    UI에서 /seldon/seldon-deploy/sklearn/api/v1.0/predictions 메뉴를 선택합니다.

    iris-swagger2.png

    3. Try it out 선택

    iris-swagger3.png

    4. Request body에 data 입력

    iris-swagger4.png

    다음 데이터를 입력합니다.

    {
    "data": {
    "ndarray":[[1.0, 2.0, 5.0, 6.0]]
    }
    }

    5. 추론 결과 확인

    Execute 버튼을 눌러서 추론 결과를 확인할 수 있습니다.

    iris-swagger5.png

    정상적으로 수행되면 다음과 같은 추론 결과를 얻습니다.

    {
    "data": {
    "names": [
    "t:0",
    "t:1",
    "t:2"
    ],
    "ndarray": [
    [
    9.912315378486697e-7,
    0.0007015931307746079,
    0.9992974156376876
    ]
    ]
    },
    "meta": {
    "requestPath": {
    "classifier": "seldonio/sklearnserver:1.11.2"
    }
    }
    }

    Using CLI

    또한, curl과 같은 http client CLI 도구를 활용해서도 API 요청을 수행할 수 있습니다.

    예를 들어, 다음과 같이 /predictions를 요청하면

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'

    아래와 같은 응답이 정상적으로 출력되는 것을 확인할 수 있습니다.

    {"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}
    minio에 접근하기 위한 credentials는 다음과 같습니다.

    apiVersion: v1
    type: Opaque
    kind: Secret
    metadata:
      name: seldon-init-container-secret
      namespace: kubeflow-user-example-com
    data:
      AWS_ACCESS_KEY_ID: bWluaW8K=
      AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
      AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp
      USE_SSL: ZmFsc2U=

    AWS_ACCESS_KEY_ID 의 입력값은 minio입니다. 다만 secret의 입력값은 인코딩된 값이어야 하기 때문에, 실제로 입력되는 값은 다음을 수행한 후 나오는 값이어야 합니다.

    data에 입력되어야 하는 값들은 다음과 같습니다.

    인코딩은 다음 명령어를 통해서 할 수 있습니다.

    echo -n minio | base64

    그러면 다음과 같은 값이 출력됩니다.

    bWluaW8=

    인코딩을 전체 값에 대해서 진행하면 다음과 같이 됩니다.

    • AWS_ACCESS_KEY_ID: bWluaW8=
    • AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
    • AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
    • USE_SSL: ZmFsc2U=
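    값이 올바르게 인코딩되었는지는 아래처럼 다시 디코딩해서 확인해 볼 수 있습니다.

    echo -n minio123 | base64                                 # bWluaW8xMjM=
    echo -n http://minio-service.kubeflow.svc:9000 | base64   # aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
    echo bWluaW8= | base64 -d                                  # minio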

    다음 명령어를 통해 secret을 생성할 수 있는 yaml파일을 생성합니다.

    cat <<EOF > seldon-init-container-secret.yaml
    apiVersion: v1
    kind: Secret
    metadata:
      name: seldon-init-container-secret
      namespace: kubeflow-user-example-com
    type: Opaque
    data:
      AWS_ACCESS_KEY_ID: bWluaW8=
      AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
      AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
      USE_SSL: ZmFsc2U=
    EOF

    다음 명령어를 통해 secret을 생성합니다.

    kubectl apply -f seldon-init-container-secret.yaml

    정상적으로 수행되면 다음과 같이 출력됩니다.

    secret/seldon-init-container-secret created

    Seldon Core yaml

    이제 Seldon Core를 생성하는 yaml파일을 작성합니다.

apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: seldon-example
  namespace: kubeflow-user-example-com
spec:
  name: model
  predictors:
    - name: model
      componentSpecs:
        - spec:
            volumes:
              - name: model-provision-location
                emptyDir: {}
            initContainers:
              - name: model-initializer
                image: gcr.io/kfserving/storage-initializer:v0.4.0
                args:
                  - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
                  - "/mnt/models"
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                envFrom:
                  - secretRef:
                      name: seldon-init-container-secret
            containers:
              - name: model
                image: ghcr.io/mlops-for-all/mlflowserver
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                    readOnly: true
                securityContext:
                  privileged: true
                  runAsUser: 0
                  runAsGroup: 0
      graph:
        name: model
        type: MODEL
        parameters:
          - name: model_uri
            type: STRING
            value: "/mnt/models"
        children: []

    이 전에 작성한 Seldon Fields와 달라진 점은 크게 두 부분입니다. initContainer에 envFrom 필드가 추가되었으며 args의 주소가 s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc 로 바뀌었습니다.

    args

    앞서 args의 첫번째 array는 우리가 다운로드받을 모델의 경로라고 했습니다.
    그럼 mlflow에 저장된 모델의 경로는 어떻게 알 수 있을까요?

    다시 mlflow에 들어가서 run을 클릭하고 모델을 누르면 다음과 같이 확인할 수 있습니다.

    seldon-mlflow-0.png

    이렇게 확인된 경로를 입력하면 됩니다.
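
참고로, aws CLI가 설치되어 있고 minio endpoint에 접근할 수 있는 환경이라면, 아래와 같이 해당 경로에 모델 artifact가 실제로 존재하는지 미리 확인해 볼 수도 있습니다. (선택 사항이며, endpoint와 경로는 각자의 환경에 맞게 바꾸어야 합니다.)

export AWS_ACCESS_KEY_ID=minio
export AWS_SECRET_ACCESS_KEY=minio123
aws --endpoint-url http://minio-service.kubeflow.svc:9000 s3 ls \
    s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc/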

    envFrom

minio에 접근해서 모델을 다운로드 받는 데 필요한 환경변수를 입력해주는 과정입니다. 앞서 만든 seldon-init-container-secret를 이용합니다.

    API 생성

    우선 위에서 정의한 스펙을 yaml 파일로 생성하겠습니다.

cat <<EOF > seldon-mlflow.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: seldon-example
  namespace: kubeflow-user-example-com
spec:
  name: model
  predictors:
    - name: model
      componentSpecs:
        - spec:
            volumes:
              - name: model-provision-location
                emptyDir: {}
            initContainers:
              - name: model-initializer
                image: gcr.io/kfserving/storage-initializer:v0.4.0
                args:
                  - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
                  - "/mnt/models"
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                envFrom:
                  - secretRef:
                      name: seldon-init-container-secret
            containers:
              - name: model
                image: ghcr.io/mlops-for-all/mlflowserver
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                    readOnly: true
                securityContext:
                  privileged: true
                  runAsUser: 0
                  runAsGroup: 0
      graph:
        name: model
        type: MODEL
        parameters:
          - name: model_uri
            type: STRING
            value: "/mnt/models"
          - name: xtype
            type: STRING
            value: "dataframe"
        children: []
EOF

    seldon pod을 생성합니다.

    kubectl apply -f seldon-mlflow.yaml

    정상적으로 수행되면 다음과 같이 출력됩니다.

    seldondeployment.machinelearning.seldon.io/seldon-example created

    이제 pod이 정상적으로 뜰 때까지 기다립니다.

    kubectl get po -n kubeflow-user-example-com | grep seldon

    다음과 비슷하게 출력되면 정상적으로 API를 생성했습니다.

    seldon-example-model-0-model-5c949bd894-c5f28      3/3     Running     0          69s
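
만약 pod이 Running 상태로 바뀌지 않는다면, 예를 들어 다음과 같이 pod 상태와 initContainer(model-initializer)의 로그를 확인해 원인을 찾아볼 수 있습니다. (pod 이름은 위 출력에 나온 이름으로 바꾸어 사용합니다.)

kubectl describe po seldon-example-model-0-model-5c949bd894-c5f28 -n kubeflow-user-example-com
kubectl logs seldon-example-model-0-model-5c949bd894-c5f28 -c model-initializer -n kubeflow-user-example-com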

    CLI를 이용해 생성된 API에는 다음 request를 통해 실행을 확인할 수 있습니다.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/seldon-example/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{
    "data": {
    "ndarray": [
    [
    143.0,
    0.0,
    30.0,
    30.0
    ]
    ],
    "names": [
    "sepal length (cm)",
    "sepal width (cm)",
    "petal length (cm)",
    "petal width (cm)"
    ]
    }
    }'

    정상적으로 실행될 경우 다음과 같은 결과를 받을 수 있습니다.

    {"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}
    버전: 1.0

    3. Seldon Monitoring

    Grafana & Prometheus

    이제, 지난 페이지에서 생성했던 SeldonDeployment 로 API Request 를 반복적으로 수행해보고, 대시보드에 변화가 일어나는지 확인해봅니다.

    대시보드

    앞서 생성한 대시보드를 포트 포워딩합니다.

    kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

    API 요청

    앞서 생성한 Seldon Deployment에 요청을 반복해서 보냅니다.

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'
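
요청을 반복해서 보낼 때는, 예를 들어 다음과 같은 간단한 셸 반복문을 사용할 수 있습니다.

for i in $(seq 1 100); do
  curl -s -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'
  sleep 1
done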

    그리고 그라파나 대시보드를 확인하면 다음과 같이 Global Request Rate 이 0 ops 에서 순간적으로 상승하는 것을 확인할 수 있습니다.

    repeat-raise.png

    이렇게 프로메테우스와 그라파나가 정상적으로 설치된 것을 확인할 수 있습니다.

쿠버네티스 환경에서 이러한 추론 엔진들을 사용하여 API Deployment를 한다면 어떤 작업이 필요할까요? 추론 엔진을 배포하기 위한 Deployment, 추론 요청을 보낼 Endpoint를 생성하기 위한 Service, 외부에서의 추론 요청을 추론 엔진으로 보내기 위한 Ingress 등 많은 쿠버네티스 리소스를 배포해 주어야 합니다.
이것 이외에도, 많은 추론 요청이 들어왔을 경우의 스케일 아웃(scale-out), 추론 엔진 상태에 대한 모니터링, 개선된 모델이 나왔을 경우 버전 업데이트 등 추론 엔진을 운영할 때의 요구사항은 한두 가지가 아닙니다.

    이러한 많은 요구사항을 처리하기 위해 추론 엔진들을 쿠버네티스 환경 위에서 한 번 더 추상화한 Serving Framework들이 개발되었습니다.

    개발된 Serving Framework들은 다음과 같은 오픈소스들이 있습니다.

    모두의 MLOps에서는 Seldon Core를 사용하여 API Deployment를 하는 과정을 다루어 보도록 하겠습니다.

type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

    kubectl edit svc/istio-ingressgateway -n istio-system
spec:
  clusterIP: 10.103.72.5
  clusterIPs:
  - 10.103.72.5
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: status-port
    port: 15021
    protocol: TCP
    targetPort: 15021
  - name: http2
    port: 80
    protocol: TCP
    targetPort: 8080
  - name: https
    port: 443
    protocol: TCP
    targetPort: 8443
  - name: tcp
    port: 31400
    protocol: TCP
    targetPort: 31400
  - name: tls
    port: 15443
    protocol: TCP
    targetPort: 15443
  selector:
    app: istio-ingressgateway
    istio: ingressgateway
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.100 # Add IP
status:
  loadBalancer: {}
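
참고로, kubectl edit 대신 다음과 같이 kubectl patch 명령으로 같은 변경을 한 번에 적용할 수도 있습니다. (IP 주소는 예시이며, 이후의 minio-service, mlflow-server-service, seldon-core-analytics-grafana 서비스에도 같은 방식을 사용할 수 있습니다.)

kubectl patch svc/istio-ingressgateway -n istio-system \
  -p '{"spec": {"type": "LoadBalancer", "loadBalancerIP": "192.168.35.100"}}'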

    다시 확인을 해보면 External-IP 값이 192.168.35.100 인 것을 확인합니다.

    kubectl get svc/istio-ingressgateway -n istio-system
    NAME                   TYPE           CLUSTER-IP    EXTERNAL-IP      PORT(S)                                                                      AGE
    istio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m

    Web Browser 를 열어 http://192.168.35.100 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

    login-after-istio-ingressgateway-setting.png

    minio Dashboard

    먼저 minio 의 Dashboard 를 제공하는 kubeflow 네임스페이스의 minio-service 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 벨런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

    kubectl get svc/minio-service -n kubeflow

    해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

    NAME            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
    minio-service ClusterIP 10.109.209.87 <none> 9000/TCP 5h14m

    type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
    추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

    kubectl edit svc/minio-service -n kubeflow
apiVersion: v1
kind: Service
metadata:
  annotations:
    kubectl.kubernetes.io/last-applied-configuration: |
      {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>
  creationTimestamp: "2022-01-05T08:44:23Z"
  labels:
    application-crd-id: kubeflow-pipelines
  name: minio-service
  namespace: kubeflow
  resourceVersion: "21120"
  uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48
spec:
  clusterIP: 10.109.209.87
  clusterIPs:
  - 10.109.209.87
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: http
    port: 9000
    protocol: TCP
    targetPort: 9000
  selector:
    app: minio
    application-crd-id: kubeflow-pipelines
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.101 # Add IP
status:
  loadBalancer: {}

    다시 확인을 해보면 External-IP 값이 192.168.35.101 인 것을 확인할 수 있습니다.

    kubectl get svc/minio-service -n kubeflow
    NAME            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)          AGE
    minio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m

    Web Browser 를 열어 http://192.168.35.101:9000 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

    login-after-minio-setting.png

    mlflow Dashboard

    먼저 mlflow 의 Dashboard 를 제공하는 mlflow-system 네임스페이스의 mlflow-server-service 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 벨런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

    kubectl get svc/mlflow-server-service -n mlflow-system

    해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

    NAME                    TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
    mlflow-server-service ClusterIP 10.111.173.209 <none> 5000/TCP 4m50s

    type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
    추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

    kubectl edit svc/mlflow-server-service -n mlflow-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: mlflow-server
    meta.helm.sh/release-namespace: mlflow-system
  creationTimestamp: "2022-01-07T04:00:19Z"
  labels:
    app.kubernetes.io/managed-by: Helm
  name: mlflow-server-service
  namespace: mlflow-system
  resourceVersion: "276246"
  uid: e5d39fb7-ad98-47e7-b512-f9c673055356
spec:
  clusterIP: 10.111.173.209
  clusterIPs:
  - 10.111.173.209
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - port: 5000
    protocol: TCP
    targetPort: 5000
  selector:
    app.kubernetes.io/name: mlflow-server
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.102 # Add IP
status:
  loadBalancer: {}

    다시 확인을 해보면 External-IP 값이 192.168.35.102 인 것을 확인할 수 있습니다.

    kubectl get svc/mlflow-server-service -n mlflow-system
    NAME                    TYPE           CLUSTER-IP       EXTERNAL-IP      PORT(S)          AGE
    mlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s

    Web Browser 를 열어 http://192.168.35.102:5000 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

    login-after-mlflow-setting.png

    Grafana Dashboard

    먼저 Grafana 의 Dashboard 를 제공하는 seldon-system 네임스페이스의 seldon-core-analytics-grafana 서비스의 타입을 LoadBalancer로 변경하여 MetalLB로부터 로드 벨런싱 기능을 제공받기 전에, 현재 상태를 확인합니다.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system

    해당 서비스의 타입은 ClusterIP이며, External-IP 값은 none 인 것을 확인할 수 있습니다.

    NAME                            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)   AGE
    seldon-core-analytics-grafana ClusterIP 10.109.20.161 <none> 80/TCP 94s

    type 을 LoadBalancer 로 변경하고 원하는 IP 주소를 입력하고 싶은 경우 loadBalancerIP 항목을 추가합니다.
추가 하지 않을 경우에는 위에서 설정한 IP 주소풀에서 순차적으로 IP 주소가 배정됩니다.

    kubectl edit svc/seldon-core-analytics-grafana -n seldon-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: seldon-core-analytics
    meta.helm.sh/release-namespace: seldon-system
  creationTimestamp: "2022-01-07T04:16:47Z"
  labels:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: grafana
    app.kubernetes.io/version: 7.0.3
    helm.sh/chart: grafana-5.1.4
  name: seldon-core-analytics-grafana
  namespace: seldon-system
  resourceVersion: "280605"
  uid: 75073b78-92ec-472c-b0d5-240038ea8fa5
spec:
  clusterIP: 10.109.20.161
  clusterIPs:
  - 10.109.20.161
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: service
    port: 80
    protocol: TCP
    targetPort: 3000
  selector:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/name: grafana
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.103 # Add IP
status:
  loadBalancer: {}

    다시 확인을 해보면 External-IP 값이 192.168.35.103 인 것을 확인할 수 있습니다.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system
    NAME                            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)        AGE
    seldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s

    Web Browser 를 열어 http://192.168.35.103:80 으로 접속하여, 다음과 같은 화면이 출력되는 것을 확인합니다.

    login-after-grafana-setting.png

    버전: 1.0

    1. Python 가상환경 설치

    파이썬 가상환경

    Python 환경을 사용하다 보면 여러 버전의 Python 환경을 사용하고 싶은 경우나, 여러 프로젝트별 패키지 버전을 따로 관리하고 싶은 경우가 발생합니다.

    이처럼 Python 환경 혹은 Python Package 환경을 가상화하여 관리하는 것을 쉽게 도와주는 도구로는 pyenv, conda, virtualenv, venv 등이 존재합니다.

이 중 모두의 MLOps에서는 pyenv와 pyenv-virtualenv를 설치하는 방법을 다룹니다.
pyenv는 Python 버전을 관리하는 것을 도와주며, pyenv-virtualenv는 pyenv의 plugin으로서 파이썬 패키지 환경을 관리하는 것을 도와줍니다.

    pyenv 설치

    Prerequisites

    운영 체제별로 Prerequisites가 다릅니다. 다음 페이지를 참고하여 필수 패키지들을 설치해주시기 바랍니다.

    설치 - macOS

    1. pyenv, pyenv-virtualenv 설치
    brew update
    brew install pyenv
    brew install pyenv-virtualenv
2. pyenv 설정

    macOS의 경우 카탈리나 버전 이후 기본 shell이 zsh로 변경되었기 때문에 zsh을 사용하는 경우를 가정하였습니다.

    echo 'eval "$(pyenv init -)"' >> ~/.zshrc
    echo 'eval "$(pyenv virtualenv-init -)"' >> ~/.zshrc
    source ~/.zshrc

    pyenv 명령이 정상적으로 수행되는지 확인합니다.

    pyenv --help
    $ pyenv --help
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    설치 - Ubuntu

    1. pyenv, pyenv-virtualenv 설치
    curl https://pyenv.run | bash

    다음과 같은 내용이 출력되면 정상적으로 설치된 것을 의미합니다.

      % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
    Dload Upload Total Spent Left Speed
    0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239
    Cloning into '/home/mlops/.pyenv'...
    ...
    중략...
    ...
    remote: Enumerating objects: 10, done.
    remote: Counting objects: 100% (10/10), done.
    remote: Compressing objects: 100% (6/6), done.
    remote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0
    Unpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.

    WARNING: seems you still have not added 'pyenv' to the load path.


    # See the README for instructions on how to set up
    # your shell environment for Pyenv.

    # Load pyenv-virtualenv automatically by adding
    # the following to ~/.bashrc:

    eval "$(pyenv virtualenv-init -)"

2. pyenv 설정

    기본 shell로 bash shell을 사용하는 경우를 가정하였습니다. bash에서 pyenv와 pyenv-virtualenv 를 사용할 수 있도록 설정합니다.

    sudo vi ~/.bashrc

    다음 문자열을 입력한 후 저장합니다.

    export PATH="$HOME/.pyenv/bin:$PATH"
    eval "$(pyenv init -)"
    eval "$(pyenv virtualenv-init -)"

    shell을 restart 합니다.

    exec $SHELL

    pyenv 명령이 정상적으로 수행되는지 확인합니다.

    pyenv --help

    다음과 같은 메시지가 출력되면 정상적으로 설정된 것을 의미합니다.

    $ pyenv
    pyenv 2.2.2
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    doctor Verify pyenv installation and development tools to build pythons.
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    pyenv 사용

    Python 버전 설치

pyenv install <Python-Version> 명령을 통해 원하는 파이썬 버전을 설치할 수 있습니다.
이번 페이지에서는 예시로 kubeflow에서 기본으로 사용하는 파이썬 3.7.12 버전을 설치하겠습니다.

    pyenv install 3.7.12

    정상적으로 설치되면 다음과 같은 메시지가 출력됩니다.

    $ pyenv install 3.7.12
    Downloading Python-3.7.12.tar.xz...
    -> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz
    Installing Python-3.7.12...
    patching file Doc/library/ctypes.rst
    patching file Lib/test/test_unicode.py
    patching file Modules/_ctypes/_ctypes.c
    patching file Modules/_ctypes/callproc.c
    patching file Modules/_ctypes/ctypes.h
    patching file setup.py
    patching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'
    patching file Modules/_decimal/libmpdec/mpdecimal.h
    Installed Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12

    Python 가상환경 생성

    pyenv virtualenv <Installed-Python-Version> <가상환경-이름> 명령을 통해 원하는 파이썬 버전의 파이썬 가상환경을 생성할 수 있습니다.

    예시로 Python 3.7.12 버전의 demo라는 이름의 Python 가상환경을 생성하겠습니다.

    pyenv virtualenv 3.7.12 demo
    $ pyenv virtualenv 3.7.12 demo
    Looking in links: /tmp/tmpffqys0gv
    Requirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)
    Requirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)
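
생성된 가상환경 목록은 다음 명령으로 확인할 수 있습니다.

pyenv virtualenvs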

    Python 가상환경 사용

    pyenv activate <가상환경 이름> 명령을 통해 위와 같은 방식으로 생성한 가상환경을 사용할 수 있습니다.

    예시로는 demo라는 이름의 Python 가상환경을 사용하겠습니다.

    pyenv activate demo

    다음과 같이 현재 가상환경의 정보가 shell의 맨 앞에 출력되는 것을 확인할 수 있습니다.

    Before

    mlops@ubuntu:~$ pyenv activate demo

    After

    pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.
    (demo) mlops@ubuntu:~$

    Python 가상환경 비활성화

    source deactivate 명령을 통해 현재 사용 중인 가상환경을 비활성화할 수 있습니다.

    source deactivate

    Before

    (demo) mlops@ubuntu:~$ source deactivate

    After

    mlops@ubuntu:~$ 
    버전: 1.0

    다루지 못한 것들

    MLOps Component

    MLOps Concepts에서 다루었던 컴포넌트를 도식화하면 다음과 같습니다.

    open-stacks-0.png

    이 중 모두의 MLOps 에서 다룬 기술 스택들은 다음과 같습니다.

    open-stacks-1.png

    보시는 것처럼 아직 우리가 다루지 못한 많은 MLOps 컴포넌트들이 있습니다.

    시간 관계상 이번에 모두 다루지는 못했지만, 만약 필요하다면 다음과 같은 오픈소스들을 먼저 참고해보면 좋을 것 같습니다.

    open-stacks-2.png

    세부 내용은 다음과 같습니다.

Mgmt.                        | Component                    | Open Source
Data Mgmt.                   | Collection                   | Kafka
                             | Validation                   | Beam
                             | Feature Store                | Flink
ML Model Dev. & Experiment   | Modeling                     | Jupyter
                             | Analysis & Experiment Mgmt.  | MLflow
                             | HPO Tuning & AutoML          | Katib
Deploy Mgmt.                 | Serving Framework            | Seldon Core
                             | A/B Test                     | Iter8
                             | Monitoring                   | Grafana, Prometheus
Process Mgmt.                | Pipeline                     | Kubeflow
                             | CI/CD                        | Github Action
                             | Continuous Training          | Argo Events
Platform Mgmt.               | Configuration Mgmt.          | Consul
                             | Code Version Mgmt.           | Github, Minio
                             | Logging                      | (EFK) Elastic Search, Fluentd, Kibana
                             | Resource Mgmt.               | Kubernetes
    버전: 1.0

    3. Components of MLOps

    Practitioners guide to MLOps

    2021년 5월에 발표된 구글의 white paper : Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning에서는 MLOps의 핵심 기능들로 다음과 같은 것들을 언급하였습니다.

    mlops-component

    각 기능이 어떤 역할을 하는지 살펴보겠습니다.

    1. Experimentation

    실험(Experimentation)은 머신러닝 엔지니어들이 데이터를 분석하고, 프로토타입 모델을 만들며 학습 기능을 구현할 수 있도록 하는 다음과 같은 기능을 제공합니다.

    • 깃(Git)과 같은 버전 컨트롤 도구와 통합된 노트북(Jupyter Notebook) 환경 제공
    • 사용한 데이터, 하이퍼 파라미터, 평가 지표를 포함한 실험 추적 기능 제공
    • 데이터와 모델에 대한 분석 및 시각화 기능 제공

    2. Data Processing

    데이터 처리(Data Processing)는 머신러닝 모델 개발 단계, 지속적인 학습(Continuous Training) 단계, 그리고 API 배포(API Deployment) 단계에서 많은 양의 데이터를 사용할 수 있게 해 주는 다음과 같은 기능을 제공합니다.

    • 다양한 데이터 소스와 서비스에 호환되는 데이터 커넥터(connector) 기능 제공
    • 다양한 형태의 데이터와 호환되는 데이터 인코더(encoder) & 디코더(decoder) 기능 제공
    • 다양한 형태의 데이터에 대한 데이터 변환과 피처 엔지니어링(feature engineering) 기능 제공
    • 학습과 서빙을 위한 확장 가능한 배치, 스트림 데이터 처리 기능 제공

    3. Model training

    모델 학습(Model training)은 모델 학습을 위한 알고리즘을 효율적으로 실행시켜주는 다음과 같은 기능을 제공합니다.

    • ML 프레임워크의 실행을 위한 환경 제공
    • 다수의 GPU / 분산 학습 사용을 위한 분산 학습 환경 제공
    • 하이퍼 파라미터 튜닝과 최적화 기능 제공

    4. Model evaluation

    모델 평가(Model evaluation)는 실험 환경과 상용 환경에서 동작하는 모델의 성능을 관찰할 수 있는 다음과 같은 기능을 제공합니다.

    • 평가 데이터에 대한 모델 성능 평가 기능
    • 서로 다른 지속 학습 실행 결과에 대한 예측 성능 추적
    • 서로 다른 모델의 성능 비교와 시각화
    • 해석할 수 있는 AI 기술을 이용한 모델 출력 해석 기능 제공

    5. Model serving

    모델 서빙(Model serving)은 상용 환경에 모델을 배포하고 서빙하기 위한 다음과 같은 기능들을 제공합니다.

    • 저 지연 추론과 고가용성 추론 기능 제공
• 다양한 ML 모델 서빙 프레임워크 지원(Tensorflow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost 등)
    • 복잡한 형태의 추론 루틴 기능 제공, 예를 들어 전처리(preprocess) 또는 후처리(postprocess) 기능과 최종 결과를 위해 다수의 모델이 사용되는 경우를 말합니다.
    • 순간적으로 치솟는 추론 요청을 처리하기 위한 오토 스케일링(autoscaling) 기능 제공
    • 추론 요청과 추론 결과에 대한 로깅 기능 제공

    6. Online experimentation

    온라인 실험(Online experimentation)은 새로운 모델이 생성되었을 때, 이 모델을 배포하면 어느 정도의 성능을 보일 것인지 검증하는 기능을 제공합니다. 이 기능은 새 모델을 배포하는 것까지 연동하기 위해 모델 저장소(Model Registry)와 연동되어야 합니다.

    • 카나리(canary) & 섀도(shadow) 배포 기능 제공
    • A/B 테스트 기능 제공
    • 멀티 암드 밴딧(Multi-armed bandit) 테스트 기능 제공

    7. Model Monitoring

    모델 모니터링(Model Monitoring)은 상용 환경에 배포된 모델이 정상적으로 동작하고 있는지를 모니터링하는 기능을 제공합니다. 예를 들어 모델의 성능이 떨어져 업데이트가 필요한지에 대한 정보 등을 제공합니다.

    8. ML Pipeline

    머신러닝 파이프라인(ML Pipeline)은 상용 환경에서 복잡한 ML 학습과 추론 작업을 구성하고 제어하고 자동화하기 위한 다음과 같은 기능을 제공합니다.

    • 다양한 이벤트를 소스를 통한 파이프라인 실행 기능
    • 파이프라인 파라미터와 생성되는 산출물 관리를 위한 머신러닝 메타데이터 추적과 연동 기능
    • 일반적인 머신러닝 작업을 위한 내장 컴포넌트 지원과 사용자가 직접 구현한 컴포넌트에 대한 지원 기능
    • 서로 다른 실행 환경 제공 기능

    9. Model Registry

    모델 저장소(Model Registry)는 머신러닝 모델의 생명 주기(Lifecycle)을 중앙 저장소에서 관리할 수 있게 해 주는 기능을 제공합니다.

    • 학습된 모델 그리고 배포된 모델에 대한 등록, 추적, 버저닝 기능 제공
    • 배포를 위해 필요한 데이터와 런타임 패키지들에 대한 정보 저장 기능

    10. Dataset and Feature Repository

    • 데이터에 대한 공유, 검색, 재사용 그리고 버전 관리 기능
    • 이벤트 스트리밍 및 온라인 추론 작업에 대한 실시간 처리 및 저 지연 서빙 기능
    • 사진, 텍스트, 테이블 형태의 데이터와 같은 다양한 형태의 데이터 지원 기능

    11. ML Metadata and Artifact Tracking

MLOps의 각 단계에서는 다양한 형태의 산출물들이 생성됩니다. ML 메타데이터는 이런 산출물들에 대한 정보를 의미합니다.
ML 메타데이터와 산출물 관리는 산출물의 위치, 타입, 속성, 그리고 관련된 실험(experiment)에 대한 정보를 관리하기 위해 다음과 같은 기능들을 제공합니다.

    • ML 산출물에 대한 히스토리 관리 기능
    • 실험과 파이프라인 파라미터 설정에 대한 추적, 공유 기능
    • ML 산출물에 대한 저장, 접근, 시각화, 다운로드 기능 제공
    • 다른 MLOps 기능과의 통합 기능 제공

이 말은 머신러닝팀과 운영팀 사이에 문제가 발생했다는 의미입니다. 그럼 왜 머신러닝팀과 운영팀에는 문제가 발생했을까요? 두 팀 간의 문제를 알아보기 위해서 추천시스템을 예시로 알아보겠습니다.

    Rule Based

    처음 추천시스템을 만드는 경우 간단한 규칙을 기반으로 아이템을 추천합니다. 예를 들어서 1주일간 판매량이 가장 많은 순서대로 보여주는 식의 방식을 이용합니다. 이 방식으로 모델을 정한다면 특별한 이유가 없는 이상 모델의 수정이 필요 없습니다.

    Machine Learning

    서비스의 규모가 조금 커지고 로그 데이터가 많이 쌓인다면 이를 이용해 아이템 기반 혹은 유저 기반의 머신러닝 모델을 생성합니다. 이때 모델은 정해진 주기에 따라 모델을 재학습 후 재배포합니다.

    Deep Learning

    개인화 추천에 대한 요구가 더 커지고 더 좋은 성능을 내는 모델을 필요해질 경우 딥러닝을 이용한 모델을 개발하기 시작합니다. 이때 만드는 모델은 머신러닝과 같이 정해진 주기에 따라 모델을 재학습 후 재배포합니다.

    graph

위에서 설명한 것을 x축을 모델의 복잡도, y축을 모델의 성능으로 두고 그래프로 표현하면, 다음과 같이 복잡도가 올라갈 때 모델의 성능도 함께 올라가는 상승 관계를 갖습니다. 그리고 머신러닝에서 딥러닝으로 넘어갈 때쯤 머신러닝 팀이 새로 생기게 됩니다.

    만약 관리해야할 모델이 적다면 서로 협업을 통해서 충분히 해결할 수 있지만 개발해야 할 모델이 많아진다면 DevOps의 경우와 같이 사일로 현상이 나타나게 됩니다.

    DevOps의 목표와 맞춰서 생각해보면 MLOps의 목표는 개발한 모델이 정상적으로 배포될 수 있는지 테스트하는 것입니다. 개발팀에서 개발한 기능이 정상적으로 배포될 수 있는지 확인하는 것이 DevOps의 목표였다면, MLOps의 목표는 머신러닝 팀에서 개발한 모델이 정상적으로 배포될 수 있는지 확인하는 것입니다.

    2) ML -> Ops

    하지만 최근 나오고 있는 MLOps 관련 제품과 설명을 보면 꼭 앞에서 설명한 목표만을 대상으로 하고 있지 않습니다. 어떤 경우에는 머신러닝 팀에서 만든 모델을 이용해 직접 운영을 할 수 있도록 도와주려고 합니다. 이러한 니즈는 최근 머신러닝 프로젝트가 진행되는 과정에서 알 수 있습니다.

    추천시스템의 경우 운영에서 간단한 모델부터 시작해 운영할 수 있었습니다. 하지만 자연어, 이미지와 같은 곳에서는 규칙 기반의 모델보다는 딥러닝을 이용해 주어진 태스크를 해결할 수 있는지 검증(POC)를 선행하는 경우가 많습니다. 검증이 끝난 프로젝트는 이제 서비스를 위한 운영 환경을 개발하기 시작합니다. 하지만 머신러닝 팀 내의 자체 역량으로는 이 문제를 해결하기 쉽지 않습니다. 이를 해결하기 위해서 MLOps가 필요한 경우도 있습니다.

    3) 결론

요약하자면 MLOps는 두 가지 목표가 있습니다. 앞에서 설명한 MLOps는 ML+Ops 로 두 팀의 생산성 향상을 위한 것이었습니다.
반면, 뒤에서 설명한 것은 ML->Ops 로 머신러닝 팀에서 직접 운영을 할 수 있도록 도와주는 것을 말합니다.

예를 들어서 어떤 기능에서는 파이썬 3.7을 쓰고 어떤 기능에서는 파이썬 3.8을 쓴다면 다음과 같은 상황을 자주 목격할 수 있습니다.

    이러한 상황이 일어나는 이유는 머신러닝 모델의 특성에 있습니다. 학습된 머신러닝 모델이 동작하기 위해서는 3가지가 필요합니다.

    1. 파이썬 코드
    2. 학습된 가중치
    3. 환경 (패키지, 버전 등)

만약 이 3가지 중 한 가지라도 전달이 잘못된다면 모델이 동작하지 않거나 예상하지 못한 예측을 할 수 있습니다. 그중에서도 환경이 일치하지 않아서 동작하지 않는 경우가 많습니다. 머신러닝은 다양한 오픈소스를 사용하는데, 오픈소스는 특성상 어떤 버전을 쓰는지에 따라서 같은 함수라도 결과가 다를 수 있습니다.
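
예를 들어 패키지 환경은 다음과 같이 버전을 고정한 파일로 함께 전달하는 것이 한 가지 방법입니다. (간단한 예시입니다.)

pip freeze > requirements.txt        # 모델 개발에 사용한 패키지 버전을 기록
pip install -r requirements.txt      # 운영 환경에서 동일한 버전으로 재현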

    이러한 문제는 서비스 초기에는 관리할 모델이 많지 않기 때문에 금방 해결할 수 있습니다. 하지만 관리하는 기능들이 많아지고 서로 소통에 어려움을 겪게 된다면 성능이 더 좋은 모델을 빠르게 배포할 수 없게 됩니다.

    1단계: ML 파이프라인 자동화

    Pipeline

    level-1-pipeline

    그래서 MLOps에서는 “파이프라인(Pipeline)”을 이용해 이러한 문제를 방지하고자 했습니다. MLOps의 파이프라인은 도커와 같은 컨테이너를 이용해 머신러닝 엔지니어가 모델 개발에 사용한 것과 동일한 환경으로 동작되는 것을 보장합니다. 이를 통해서 환경이 달라서 모델이 동작하지 않는 상황을 방지합니다.

    그런데 파이프라인은 범용적인 용어로 여러 다양한 태스크에서 사용됩니다. 머신러닝 엔지니어가 작성하는 파이프라인의 역할은 무엇일까요?
    머신러닝 엔지니어가 작성하는 파이프라인은 학습된 모델을 생산합니다. 그래서 파이프라인 대신 학습 파이프라인(Training Pipeline)이 더 정확하다고 볼 수 있습니다.

    Continuous Training

    level-1-ct.png

    그리고 Continuous Training(CT) 개념이 추가됩니다. 그렇다면 CT는 왜 필요할까요?

    Auto Retrain

    Real World에서 데이터는 Data Shift라는 데이터의 분포가 계속해서 변하는 특징이 있습니다. 그래서 과거에 학습한 모델이 시간이 지남에 따라 모델의 성능이 저하되는 문제가 있습니다. 이 문제를 해결하는 가장 간단하고 효과적인 해결책은 바로 최근 데이터를 이용해 모델을 재학습하는 것입니다. 변화된 데이터 분포에 맞춰서 모델을 재학습하면 다시 준수한 성능을 낼 수 있습니다.

    Auto Deploy

하지만 제조업과 같이 한 공장에서 여러 레시피를 처리하는 경우 무조건 재학습을 하는 것이 좋지 않을 수도 있습니다. Blind Spot이 대표적인 예입니다.

    예를 들어서 자동차 생산 라인에서 모델 A에 대해서 모델을 만들고 이를 이용해 예측을 진행하고 있었습니다. 만약 전혀 다른 모델 B가 들어오면 이전에 보지 못한 데이터 패턴이기 때문에 모델 B에 대해서 새로운 모델을 학습합니다.

    이제 모델 B에 대해서 모델을 만들었기 때문에 모델은 예측을 진행할 것 입니다. 그런데 만약 데이터가 다시 모델 A로 바뀐다면 어떻게 할까요?
    만약 Retraining 규칙만 있다면 다시 모델 A에 대해서 새로운 모델을 학습하게 됩니다. 그런데 머신러닝 모델이 충분한 성능을 보이기 위해서는 충분한 양의 데이터가 모여야 합니다. Blind Spot이란 이렇게 데이터를 모으기 위해서 모델이 동작하지 않는 구간을 말합니다.

이러한 Blind Spot을 해결하는 방법은 간단합니다. 모델 A에 대한 모델이 과거에 있었는지 확인하고, 만약 있었다면 새로운 모델을 바로 학습하기보다는 이전 모델을 이용해 다시 예측을 하면 이런 Blind Spot을 해결할 수 있습니다. 이렇게 모델 정보와 같은 메타 데이터를 이용해 사용할 모델을 자동으로 바꾸어 배포해주는 것을 Auto Deploy라고 합니다.

정리하자면 CT를 위해서는 Auto Retraining과 Auto Deploy 두 가지 기능이 필요합니다. 둘은 서로의 단점을 보완해 계속해서 모델의 성능을 유지할 수 있게 합니다.

    2단계: CI/CD 파이프라인의 자동화

    level-2

    2단계의 제목은 CI와 CD의 자동화 입니다. DevOps에서의 CI/CD의 대상은 소스 코드입니다. 그렇다면 MLOps는 어떤 것이 CI/CD의 대상일까요?

    MLOps의 CI/CD 대상 또한 소스 코드인 것은 맞지만 조금 더 엄밀히 정의하자면 학습 파이프라인이라고 볼 수 있습니다.

    그래서 모델을 학습하는데 있어서 영향이 있는 변화에 대해서 실제로 모델이 정상적으로 학습이 되는지 (CI), 학습된 모델이 정상적으로 동작하는지 (CD)를 확인해야 합니다. 그래서 학습을 하는 코드에 직접적인 수정이 있는 경우에는 CI/CD를 진행해야 합니다.

    코드 외에도 사용하는 패키지의 버전, 파이썬의 버전 변경도 CI/CD의 대상입니다. 많은 경우 머신 러닝은 오픈 소스를 이용합니다. 하지만 오픈 소스는 그 특성상 버전이 바뀌었을 때 함수의 내부 로직이 변하는 경우도 있습니다. 물론 어느 정도 버전이 올라 갈 때 이와 관련된 알림을 주지만 한 번에 버전이 크게 바뀐다면 이러한 변화를 모를 수도 있습니다.
그래서 사용하는 패키지의 버전이 변하는 경우에도 CI/CD를 통해 정상적으로 모델이 학습, 동작하는지 확인을 해야 합니다.

만약, 특정 서비스가 장애를 일으켰다면 여러 컨테이너의 로그를 확인해가며 문제를 파악해야 합니다.
    또한, 특정 클러스터나 특정 컨테이너에 작업이 몰리지 않도록 스케줄링(Scheduling)하고 로드 밸런싱(Load Balancing)하며, 스케일링(Scaling)하는 등의 수많은 작업을 담당해야 합니다. 이렇게 수많은 컨테이너의 상태를 지속해서 관리하고 운영하는 과정을 조금이나마 쉽게, 자동으로 할 수 있는 기능을 제공해주는 소프트웨어가 바로 컨테이너 오케스트레이션 시스템입니다.

    머신러닝에서는 어떻게 쓰일 수 있을까요?
    -예를 들어서 GPU가 있어야 하는 딥러닝 학습 코드가 패키징된 컨테이너는 사용 가능한 GPU가 있는 클러스터에서 수행하고, 많은 메모리를 필요로 하는 데이터 전처리 코드가 패키징된 컨테이너는 메모리의 여유가 많은 클러스터에서 수행하고, 학습 중에 클러스터에 문제가 생기면 자동으로 같은 컨테이너를 다른 클러스터로 이동시키고 다시 학습을 진행하는 등의 작업을 사람이 일일이 수행하지 않고, 자동으로 관리하는 시스템을 개발한 뒤 맡기는 것입니다.

    집필을 하는 2022년을 기준으로 쿠버네티스는 컨테이너 오케스트레이션 시스템의 사실상의 표준(De facto standard)입니다.

    CNCF에서 2018년 발표한 Survey 에 따르면 다음 그림과 같이 이미 두각을 나타내고 있었으며, 2019년 발표한 Survey에 따르면 그중 78%가 상용 수준(Production Level)에서 사용하고 있다는 것을 알 수 있습니다.

    k8s-graph

    쿠버네티스 생태계가 이처럼 커지게 된 이유에는 여러 가지 이유가 있습니다. 하지만 도커와 마찬가지로 쿠버네티스 역시 머신러닝 기반의 서비스에서만 사용하는 기술이 아니기에, 자세히 다루기에는 상당히 많은 양의 기술적인 내용을 다루어야 하므로 이번 모두의 MLOps에서는 자세한 내용은 생략할 예정입니다.

    다만, 모두의 MLOps에서 앞으로 다룰 내용은 도커와 쿠버네티스에 대한 내용을 어느 정도 알고 계신 분들을 대상으로 작성하였습니다. 따라서 쿠버네티스에 대해 익숙하지 않으신 분들은 다음 쿠버네티스 공식 문서, subicura 님의 개인 블로그 글 등의 쉽고 자세한 자료들을 먼저 참고해주시는 것을 권장합니다.

    버전: 1.0

    6. Kubeflow Pipeline 관련

    Central Dashboard의 왼쪽 탭의 Experiments(KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions 페이지들에서는 Kubeflow Pipeline과 Pipeline의 실행 그리고 Pipeline Run의 결과를 관리합니다.

    left-tabs

    Kubeflow Pipeline이 모두의 MLOps에서 Kubeflow를 사용하는 주된 이유이며, Kubeflow Pipeline을 만드는 방법, 실행하는 방법, 결과를 확인하는 방법 등 자세한 내용은 3.Kubeflow에서 다룹니다.

    버전: 1.0

    5. Experiments(AutoML)

    다음으로는 Central Dashboard의 왼쪽 탭의 Experiments(AutoML)을 클릭해보겠습니다.

    left-tabs

    automl

    Experiments(AutoML) 페이지는 Kubeflow에서 Hyperparameter Tuning과 Neural Architecture Search를 통한 AutoML을 담당하는 Katib를 관리할 수 있는 페이지입니다.

    Katib와 Experiments(AutoML)에 대한 사용법은 모두의 MLOps v1.0에서는 다루지 않으며, v2.0에 추가될 예정입니다.

    버전: 1.0

    1. Central Dashboard

    Kubeflow 설치를 완료하면, 다음 커맨드를 통해 대시보드에 접속할 수 있습니다.

    kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80

    after-login

    Central Dashboard는 Kubeflow에서 제공하는 모든 기능을 통합하여 제공하는 UI입니다. Central Dashboard에서 제공하는 기능은 크게 왼쪽의 탭을 기준으로 구분할 수 있습니다.

    left-tabs

    • Home
    • Notebooks
    • Tensorboards
    • Volumes
    • Models
    • Experiments(AutoML)
    • Experiments(KFP)
    • Pipelines
    • Runs
    • Recurring Runs
    • Artifacts
    • Executions

    그럼 이제 기능별 간단한 사용법을 알아보겠습니다.

    버전: 1.0

    2. Notebooks

    노트북 서버(Notebook Server) 생성하기

    다음 Central Dashboard의 왼쪽 탭의 Notebooks를 클릭해보겠습니다.

    left-tabs

    다음과 같은 화면을 볼 수 있습니다.

    Notebooks 탭은 JupyterHub와 비슷하게 유저별로 jupyter notebook 및 code server 환경(이하 노트북 서버)을 독립적으로 생성하고 접속할 수 있는 페이지입니다.

    notebook-home

    오른쪽 위의 + NEW NOTEBOOK 버튼을 클릭합니다.

    new-notebook

    아래와 같은 화면이 나타나면, 이제 생성할 노트북 서버의 스펙(Spec)을 명시하여 생성합니다.

    create

    각 스펙에 대한 자세한 내용은 아래와 같습니다.
    • name:
      • 노트북 서버를 구분할 수 있는 이름으로 생성합니다.
    • namespace :
      • 따로 변경할 수 없습니다. (현재 로그인한 user 계정의 namespace이 자동으로 지정되어 있습니다.)
    • Image:
      • sklearn, pytorch, tensorflow 등의 파이썬 패키지가 미리 설치된 jupyter lab 이미지 중 사용할 이미지를 선택합니다.
        • 노트북 서버 내에서 GPU를 사용하여 tensorflow-cuda, pytorch-cuda 등의 이미지를 사용하는 경우, 하단의 GPUs 부분을 확인하시기 바랍니다.
      • 추가적인 패키지나 소스코드 등을 포함한 커스텀(Custom) 노트북 서버를 사용하고 싶은 경우에는 커스텀 이미지(Custom Image)를 만들고 배포 후 사용할 수도 있습니다.
    • CPU / RAM
      • 필요한 자원 사용량을 입력합니다.
        • cpu : core 단위
          • 가상 core 개수 단위를 의미하며, int 형식이 아닌 1.5, 2.7 등의 float 형식도 입력할 수 있습니다.
        • memory : Gi 단위
    • GPUs
      • 주피터 노트북에 할당할 GPU 개수를 입력합니다.
        • None
          • GPU 자원이 필요하지 않은 상황
        • 1, 2, 4
          • GPU 1, 2, 4 개 할당
      • GPU Vendor
        • 앞의 (Optional) Setup GPU 를 따라 nvidia gpu plugin을 설치하였다면 NVIDIA를 선택합니다.
    • Workspace Volume
      • 노트북 서버 내에서 필요한 만큼의 디스크 용량을 입력합니다.
      • Type 과 Name 은 변경하지 않고, 디스크 용량을 늘리고 싶거나 AccessMode 를 변경하고 싶을 때에만 변경해서 사용하시면 됩니다.
        • "Don't use Persistent Storage for User's home" 체크박스는 노트북 서버의 작업 내용을 저장하지 않아도 상관없을 때에만 클릭합니다. 일반적으로는 누르지 않는 것을 권장합니다.
        • 기존에 미리 생성해두었던 PVC를 사용하고 싶을 때에는, Type을 "Existing" 으로 입력하여 해당 PVC의 이름을 입력하여 사용하시면 됩니다.
    • Data Volumes
      • 추가적인 스토리지 자원이 필요하다면 "+ ADD VOLUME" 버튼을 클릭하여 생성할 수 있습니다.
    • Configurations, Affinity/Tolerations, Miscellaneous Settings
      • 일반적으로는 필요하지 않으므로 모두의 MLOps에서는 자세한 설명을 생략합니다.

    모두 정상적으로 입력하였다면 하단의 LAUNCH 버튼이 활성화되며, 버튼을 클릭하면 노트북 서버 생성이 시작됩니다.

    creating

    생성 후 아래와 같이 Status 가 초록색 체크 표시 아이콘으로 변하며, CONNECT 버튼이 활성화됩니다.

    created
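
참고로, 생성된 노트북 서버는 다음과 같이 CLI로도 확인해 볼 수 있습니다. (네임스페이스는 예시이며, 각자의 user namespace로 바꾸어 사용합니다.)

kubectl get notebooks -n kubeflow-user-example-com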


    노트북 서버 접속하기

    CONNECT 버튼을 클릭하면 브라우저에 새 창이 열리며, 다음과 같은 화면이 보입니다.

    notebook-access

    Launcher의 Notebook, Console, Terminal 아이콘을 클릭하여 사용할 수 있습니다.

    생성된 Notebook 화면

    notebook-console

    생성된 Terminal 화면

    terminal-console


    노트북 서버 중단하기

    노트북 서버를 오랜 시간 사용하지 않는 경우, 쿠버네티스 클러스터의 효율적인 리소스 사용을 위해서 노트북 서버를 중단(Stop)할 수 있습니다. 단, 이 경우 노트북 서버 생성 시 Workspace Volume 또는 Data Volume으로 지정해놓은 경로 외에 저장된 데이터는 모두 초기화되는 것에 주의하시기 바랍니다.
노트북 서버 생성 당시 경로를 변경하지 않았다면, 디폴트(Default) Workspace Volume의 경로는 노트북 서버 내의 /home/jovyan 이므로, /home/jovyan 의 하위 경로 이외의 경로에 저장된 데이터는 모두 사라집니다.

    다음과 같이 STOP 버튼을 클릭하면 노트북 서버가 중단됩니다.

    notebook-stop

    중단이 완료되면 다음과 같이 CONNECT 버튼이 비활성화되며, PLAY 버튼을 클릭하면 다시 정상적으로 사용할 수 있습니다.

    notebook-restart

    버전: 1.0

    3. Tensorboards

    다음으로는 Central Dashboard의 왼쪽 탭의 Tensorboards를 클릭해보겠습니다.

    left-tabs

    다음과 같은 화면을 볼 수 있습니다.

    tensorboard

    Tensorboards 탭은 Tensorflow, PyTorch 등의 프레임워크에서 제공하는 Tensorboard 유틸이 생성한 ML 학습 관련 데이터를 시각화하는 텐서보드 서버(Tensorboard Server)를 쿠버네티스 클러스터에 생성하는 기능을 제공합니다.

    이렇게 생성한 텐서보드 서버는, 일반적인 원격 텐서보드 서버의 사용법과 같이 사용할 수도 있으며, Kubeflow 파이프라인 런에서 바로 텐서보드 서버에 데이터를 저장하는 용도로 활용할 수 있습니다.

    Kubeflow 파이프라인 런의 결과를 시각화하는 방법에는 다양한 방식이 있으며, 모두의 MLOps에서는 더 일반적으로 활용할 수 있도록 Kubeflow 컴포넌트의 Visualization 기능과 MLflow의 시각화 기능을 활용할 예정이므로, Tensorboards 페이지에 대한 자세한 설명은 생략하겠습니다.

    버전: 1.0

    4. Volumes

    Volumes

    다음으로는 Central Dashboard의 왼쪽 탭의 Volumes를 클릭해보겠습니다.

    left-tabs

    다음과 같은 화면을 볼 수 있습니다.

    volumes

    Volumes 탭은 Kubernetes의 볼륨(Volume), 정확히는 퍼시스턴트 볼륨 클레임(Persistent Volume Claim, 이하 pvc) 중 현재 user의 namespace에 속한 pvc를 관리하는 기능을 제공합니다.

    위 스크린샷을 보면, 1. Notebooks 페이지에서 생성한 Volume의 정보를 확인할 수 있습니다. 해당 Volume의 Storage Class는 쿠버네티스 클러스터 설치 당시 설치한 Default Storage Class인 local-path로 설정되어있음을 확인할 수 있습니다.

    이외에도 user namespace에 새로운 볼륨을 생성하거나, 조회하거나, 삭제하고 싶은 경우에 Volumes 페이지를 활용할 수 있습니다.
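
물론 같은 pvc 목록을 다음과 같이 CLI로도 조회할 수 있습니다. (네임스페이스는 각자의 user namespace로 바꾸어 사용합니다.)

kubectl get pvc -n kubeflow-user-example-com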


    볼륨 생성하기

    오른쪽 위의 + NEW VOLUME 버튼을 클릭하면 다음과 같은 화면을 볼 수 있습니다.

    new-volume

    name, size, storage class, access mode를 지정하여 생성할 수 있습니다.

    원하는 리소스 스펙을 지정하여 생성하면 다음과 같이 볼륨의 Status가 Pending으로 조회됩니다. Status 아이콘에 마우스 커서를 가져다 대면 해당 볼륨은 mount하여 사용하는 first consumer가 나타날 때 실제로 생성을 진행한다(This volume will be bound when its first consumer is created.)는 메시지를 확인할 수 있습니다.
이는 실습에서 사용하는 StorageClass인 local-path의 볼륨 생성 정책에 해당하며, 문제 상황이 아닙니다.
해당 페이지에서 Status가 Pending 으로 보이더라도 해당 볼륨을 사용하길 원하는 노트북 서버 혹은 파드(Pod)에서는 해당 볼륨의 이름을 지정하여 사용할 수 있으며, 그때 실제로 볼륨 생성이 진행됩니다.

    creating-volume

바로 입력과 출력에서 받는 argument 중 경로와 관련된 것들에 _path 접미사가 모두 사라졌습니다.
    iris_data.outputs["data_path"] 가 아닌 iris_data.outputs["data"] 으로 접근하는 것을 확인할 수 있습니다.
    이는 kubeflow에서 정한 법칙으로 InputPathOutputPath 으로 생성된 경로들은 파이프라인에서 접근할 때는 _path 접미사를 생략하여 접근합니다.

다만 방금 작성한 파이프라인을 업로드할 경우 실행이 되지 않습니다. 이유는 다음 페이지에서 설명합니다.

Kubeflow는 쿠버네티스를 이용하기 때문에 컴포넌트 래퍼는 각각 독립된 컨테이너 위에서 컴포넌트 콘텐츠를 실행합니다.

자세히 보면 생성된 train_from_csv.yaml 에서 정해진 이미지는 image: python:3.7 입니다.

    이제 어떤 이유 때문에 실행이 안 되는지 눈치채신 분들도 있을 것입니다.

    python:3.7 이미지에는 우리가 사용하고자 하는 dill, pandas, sklearn 이 설치되어 있지 않습니다.
    그러므로 실행할 때 해당 패키지가 존재하지 않는다는 에러와 함께 실행이 안 됩니다.

    그럼 어떻게 패키지를 추가할 수 있을까요?

    패키지 추가 방법

Kubeflow 컴포넌트로 변환하는 과정에서 두 가지 방법을 통해 패키지를 추가할 수 있습니다.

    1. base_image 사용
2. packages_to_install 사용

    컴포넌트를 컴파일할 때 사용했던 함수 create_component_from_func 가 어떤 argument들을 받을 수 있는지 확인해 보겠습니다.

def create_component_from_func(
    func: Callable,
    output_component_file: Optional[str] = None,
    base_image: Optional[str] = None,
    packages_to_install: List[str] = None,
    annotations: Optional[Mapping[str, str]] = None,
):
    • func: 컴포넌트로 만들 컴포넌트 래퍼 함수
    • base_image: 컴포넌트 래퍼가 실행할 이미지
    • packages_to_install: 컴포넌트에서 사용해서 추가로 설치해야 하는 패키지

    1. base_image

    컴포넌트가 실행되는 순서를 좀 더 자세히 들여다보면 다음과 같습니다.

    1. docker pull base_image
    2. pip install packages_to_install
    3. run command

    만약 컴포넌트가 사용하는 base_image에 패키지들이 전부 설치되어 있다면 추가적인 패키지 설치 없이 바로 사용할 수 있습니다.

    예를 들어, 이번 페이지에서는 다음과 같은 Dockerfile을 작성하겠습니다.

    FROM python:3.7

    RUN pip install dill pandas scikit-learn

위의 Dockerfile을 이용해 이미지를 빌드해 보겠습니다. 실습에서 사용해볼 컨테이너 레지스트리는 ghcr입니다.
각자 환경에 맞추어서 사용할 레지스트리를 선택한 후 업로드하면 됩니다.
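
ghcr에 push하기 위해서는 먼저 해당 레지스트리에 로그인이 되어 있어야 합니다. 예를 들어 다음과 같이 Personal Access Token으로 로그인할 수 있습니다. (GITHUB_TOKEN과 github-username은 각자의 값으로 바꾸어 사용하는 예시입니다.)

echo $GITHUB_TOKEN | docker login ghcr.io -u <github-username> --password-stdin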

    docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image
    docker push ghcr.io/mlops-for-all/base-image

    이제 base_image를 입력해 보겠습니다.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    base_image="ghcr.io/mlops-for-all/base-image:latest",
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)


if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

    이제 생성된 컴포넌트를 컴파일하면 다음과 같이 나옵니다.

name: Train from csv
inputs:
- {name: train_data, type: csv}
- {name: train_target, type: csv}
- {name: kernel, type: String}
outputs:
- {name: model, type: dill}
implementation:
  container:
    image: ghcr.io/mlops-for-all/base-image:latest
    command:
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def _make_parent_dirs_and_return_path(file_path: str):
          import os
          os.makedirs(os.path.dirname(file_path), exist_ok=True)
          return file_path

      def train_from_csv(
          train_data_path,
          train_target_path,
          model_path,
          kernel,
      ):
          import dill
          import pandas as pd

          from sklearn.svm import SVC

          train_data = pd.read_csv(train_data_path)
          train_target = pd.read_csv(train_target_path)

          clf = SVC(kernel=kernel)
          clf.fit(train_data, train_target)

          with open(model_path, mode="wb") as file_writer:
              dill.dump(clf, file_writer)

      import argparse
      _parser = argparse.ArgumentParser(prog='Train from csv', description='')
      _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
      _parsed_args = vars(_parser.parse_args())

      _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

    base_image가 우리가 설정한 값으로 바뀐 것을 확인할 수 있습니다.

    2. packages_to_install

하지만 패키지가 추가될 때마다 docker 이미지를 계속해서 새로 생성하는 작업은 많은 시간이 소요됩니다.
이 때, packages_to_install argument 를 사용하면 패키지를 컨테이너에 쉽게 추가할 수 있습니다.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)


if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

Running the script generates the following train_from_csv.yaml file.

    name: Train from csv
    inputs:
    - {name: train_data, type: csv}
    - {name: train_target, type: csv}
    - {name: kernel, type: String}
    outputs:
    - {name: model, type: dill}
    implementation:
    container:
    image: python:3.7
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
    'scikit-learn==1.0.1' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    kernel,
    ):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

Looking more closely at the order in which the component above executes:

    1. docker pull python:3.7
    2. pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1
    3. run command

Looking closely at the generated yaml file, the following lines are added automatically; they install the required packages, so the component runs correctly without errors.

        command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
    'scikit-learn==1.0.1' --user) && "$0" "$@"

diff --git a/docs/kubeflow/advanced-mlflow/index.html b/docs/kubeflow/advanced-mlflow/index.html

The MLflow endpoint that the model is uploaded to must be configured to point at the mlflow service we installed.
For the S3 endpoint address, we use the Kubernetes service DNS name of the MinIO instance installed together with the MLflow server. That service was created in the kubeflow namespace under the name minio-service, so we set it to http://minio-service.kubeflow.svc:9000.
Similarly, for the tracking_uri we use the Kubernetes service DNS name of the MLflow server and set it to http://mlflow-server-service.mlflow-system.svc:5000.

from functools import partial
from kfp.components import InputPath, create_component_from_func

@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
        clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
        input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
        signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
        conda_env = dill.load(file_reader)

    save_model(
        sk_model=clf,
        path=model_name,
        serialization_format="cloudpickle",
        conda_env=conda_env,
        signature=signature,
        input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)

MLflow Pipeline

Now let's connect the components we have written into a pipeline.

    Data Component

The data used to train the model is sklearn's iris dataset. Let's write a component that generates this data.

from functools import partial

from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    Pipeline

The pipeline code can be written as follows.

from kfp.dsl import pipeline


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
    iris_data = load_iris_data()
    model = train_from_csv(
        train_data=iris_data.outputs["data"],
        train_target=iris_data.outputs["target"],
        kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
        model_name=model_name,
        model=model.outputs["model"],
        input_example=model.outputs["input_example"],
        signature=model.outputs["signature"],
        conda_env=model.outputs["conda_env"],
    )

    Run

Putting the components and the pipeline written above into a single Python file gives the following.

from functools import partial

import kfp
from kfp.components import InputPath, OutputPath, create_component_from_func
from kfp.dsl import pipeline


@partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)


@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
        dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
        dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
        additional_pip_deps=["dill", "pandas", "scikit-learn"]
    )
    with open(conda_env_path, "wb") as file_writer:
        dill.dump(conda_env, file_writer)


@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
        clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
        input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
        signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
        conda_env = dill.load(file_reader)

    save_model(
        sk_model=clf,
        path=model_name,
        serialization_format="cloudpickle",
        conda_env=conda_env,
        signature=signature,
        input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
    iris_data = load_iris_data()
    model = train_from_csv(
        train_data=iris_data.outputs["data"],
        train_target=iris_data.outputs["target"],
        kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
        model_name=model_name,
        model=model.outputs["model"],
        input_example=model.outputs["input_example"],
        signature=model.outputs["signature"],
        conda_env=model.outputs["conda_env"],
    )


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")

    mlflow_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: mlflow-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-19T14:14:11.999807',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "kernel", "type":
    "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}
    spec:
    entrypoint: mlflow-pipeline
    templates:
    - name: load-iris-data
    container:
    args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location 'pandas' 'scikit-learn' --user)
    && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def load_iris_data(
    data_path,
    target_path,
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    import argparse
    _parser = argparse.ArgumentParser(prog='Load iris data', description='')
    _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = load_iris_data(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: load-iris-data-data, path: /tmp/outputs/data/data}
    - {name: load-iris-data-target, path: /tmp/outputs/target/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''pandas'' ''scikit-learn'' ||
    PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    ''pandas'' ''scikit-learn'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef load_iris_data(\n data_path,\n target_path,\n):\n import
    pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data
    = pd.DataFrame(iris[\"data\"], columns=iris[\"feature_names\"])\n target
    = pd.DataFrame(iris[\"target\"], columns=[\"target\"])\n\n data.to_csv(data_path,
    index=False)\n target.to_csv(target_path, index=False)\n\nimport argparse\n_parser
    = argparse.ArgumentParser(prog=''Load iris data'', description='''')\n_parser.add_argument(\"--data\",
    dest=\"data_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--target\", dest=\"target_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = load_iris_data(**_parsed_args)\n"],
    "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":
    "data", "type": "csv"}, {"name": "target", "type": "csv"}]}', pipelines.kubeflow.org/component_ref: '{}'}
    - name: mlflow-pipeline
    inputs:
    parameters:
    - {name: kernel}
    - {name: model_name}
    dag:
    tasks:
    - {name: load-iris-data, template: load-iris-data}
    - name: train-from-csv
    template: train-from-csv
    dependencies: [load-iris-data]
    arguments:
    parameters:
    - {name: kernel, value: '{{inputs.parameters.kernel}}'}
    artifacts:
    - {name: load-iris-data-data, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}'}
    - {name: load-iris-data-target, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}'}
    - name: upload-sklearn-model-to-mlflow
    template: upload-sklearn-model-to-mlflow
    dependencies: [train-from-csv]
    arguments:
    parameters:
    - {name: model_name, value: '{{inputs.parameters.model_name}}'}
    artifacts:
    - {name: train-from-csv-conda_env, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}'}
    - {name: train-from-csv-input_example, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}'}
    - {name: train-from-csv-model, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}'}
    - {name: train-from-csv-signature, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}'}
    - name: train-from-csv
    container:
    args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,
    --kernel, '{{inputs.parameters.kernel}}', --model, /tmp/outputs/model/data,
    --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,
    --conda-env, /tmp/outputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    kernel,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["dill", "pandas", "scikit-learn"]
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: kernel}
    artifacts:
    - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}
    - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}
    outputs:
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/outputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",
    {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",
    {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},
    "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":
    "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''dill'' ''pandas''
    ''scikit-learn'' ''mlflow'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
    pip install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n):\n import
    dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from
    mlflow.models.signature import infer_signature\n from mlflow.utils.environment
    import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target
    = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data,
    train_target)\n\n with open(model_path, mode=\"wb\") as file_writer:\n dill.dump(clf,
    file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path,
    \"wb\") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature
    = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path,
    \"wb\") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env
    = _mlflow_conda_env(\n additional_pip_deps=[\"dill\", \"pandas\",
    \"scikit-learn\"]\n )\n with open(conda_env_path, \"wb\") as file_writer:\n dill.dump(conda_env,
    file_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Train
    from csv'', description='''')\n_parser.add_argument(\"--train-data\", dest=\"train_data_path\",
    type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--train-target\",
    dest=\"train_target_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--kernel\",
    dest=\"kernel\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\", dest=\"input_example_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\", dest=\"conda_env_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = train_from_csv(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},
    {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],
    "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},
    {"name": "input_example", "type": "dill"}, {"name": "signature", "type":
    "dill"}, {"name": "conda_env", "type": "dill"}]}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"kernel": "{{inputs.parameters.kernel}}"}'}
    - name: upload-sklearn-model-to-mlflow
    container:
    args: [--model-name, '{{inputs.parameters.model_name}}', --model, /tmp/inputs/model/data,
    --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,
    --conda-env, /tmp/inputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' 'boto3' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' 'boto3' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def upload_sklearn_model_to_mlflow(
    model_name,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)

    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)

    import argparse
    _parser = argparse.ArgumentParser(prog='Upload sklearn model to mlflow', description='')
    _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: model_name}
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/inputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":
    "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",
    {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' ''boto3'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install
    --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn'' ''mlflow''
    ''boto3'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n):\n import
    os\n import dill\n from mlflow.sklearn import save_model\n\n from
    mlflow.tracking.client import MlflowClient\n\n os.environ[\"MLFLOW_S3_ENDPOINT_URL\"]
    = \"http://minio-service.kubeflow.svc:9000\"\n os.environ[\"AWS_ACCESS_KEY_ID\"]
    = \"minio\"\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"minio123\"\n\n client
    = MlflowClient(\"http://mlflow-server-service.mlflow-system.svc:5000\")\n\n with
    open(model_path, mode=\"rb\") as file_reader:\n clf = dill.load(file_reader)\n\n with
    open(input_example_path, \"rb\") as file_reader:\n input_example
    = dill.load(file_reader)\n\n with open(signature_path, \"rb\") as file_reader:\n signature
    = dill.load(file_reader)\n\n with open(conda_env_path, \"rb\") as file_reader:\n conda_env
    = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format=\"cloudpickle\",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run
    = client.create_run(experiment_id=\"0\")\n client.log_artifact(run.info.run_id,
    model_name)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Upload
    sklearn model to mlflow'', description='''')\n_parser.add_argument(\"--model-name\",
    dest=\"model_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\",
    dest=\"input_example_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\",
    dest=\"conda_env_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},
    {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},
    {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],
    "name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
    arguments:
    parameters:
    - {name: kernel}
    - {name: model_name}
    serviceAccountName: pipeline-runner

After running the script, upload the generated mlflow_pipeline.yaml file as a pipeline, execute it, and check the result of the run.

    mlflow-svc-0
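
As an alternative to uploading the yaml file through the web UI, the compiled pipeline can also be submitted with the kfp SDK. This is a minimal sketch, not part of the original walkthrough: it assumes kfp.Client() can reach the Kubeflow Pipelines API (for example, from a notebook running inside the cluster; otherwise pass host and authentication details), and the kernel and model_name values below are placeholders.

import kfp

# May need host/auth arguments depending on how your cluster is exposed.
client = kfp.Client()
client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "svc"},
)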

Port-forward the mlflow service to access the MLflow UI.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000; you can see that a run has been created, as shown below.

    mlflow-svc-1

Click on the run, and you can see that the trained model files are there.

    mlflow-svc-2
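
If you prefer to verify the upload programmatically instead of through the UI, the run and its artifacts can also be inspected with MlflowClient. This is a hedged sketch, not from the original tutorial: it reuses the port-forward above for the tracking server, and listing the artifacts additionally assumes the MinIO service is reachable from your machine (for example via kubectl port-forward svc/minio-service -n kubeflow 9000:9000) with the same credentials used earlier.

import os

from mlflow.tracking.client import MlflowClient

# Needed only for list_artifacts, which reads from the (port-forwarded) MinIO store.
os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://localhost:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://localhost:5000")
for run in client.search_runs(experiment_ids=["0"]):
    print(run.info.run_id, run.info.artifact_uri)
    for artifact in client.list_artifacts(run.info.run_id):
        print(" -", artifact.path)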

    "name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
    arguments:
    parameters:
    - {name: kernel}
    - {name: model_name}
    serviceAccountName: pipeline-runner

After running the script, upload the generated mlflow_pipeline.yaml file as a pipeline, run it, and check the result of the run.

    mlflow-svc-0
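
If you prefer not to click through the dashboard, the upload-and-run step can also be scripted with the KFP SDK. The sketch below is a minimal example; the host, namespace, and argument values are assumptions for a port-forwarded installation (a multi-user Kubeflow install may additionally require an authenticated session cookie passed via the cookies argument), so adjust them to your environment.

import kfp

# Assumed endpoint of a port-forwarded Kubeflow installation.
client = kfp.Client(host="http://localhost:8080/pipeline", namespace="kubeflow-user-example-com")

# Register the compiled pipeline ...
client.upload_pipeline("mlflow_pipeline.yaml", pipeline_name="mlflow-pipeline")

# ... and start a run from the same package (kernel/model_name are example values).
run = client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "svc-example"},
    run_name="mlflow-pipeline-run",
)
print(run.run_id)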

Port-forward the MLflow service and open the MLflow UI.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000; you can see that a run has been created, as shown below.

    mlflow-svc-1

Click the run and you can see that it contains the trained model file.

    mlflow-svc-2
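
The result can also be checked without the UI by querying the tracking server with MlflowClient, the same client the upload component used. A minimal sketch, assuming the MLflow service has been port-forwarded to localhost:5000 as above (the MinIO values mirror the ones hard-coded in the component and are only needed if you also download artifacts):

import os

from mlflow.tracking.client import MlflowClient

# Assumed, port-forwarded endpoints and the demo MinIO credentials from the component.
os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://localhost:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

client = MlflowClient("http://localhost:5000")
for run in client.search_runs(experiment_ids=["0"]):
    artifacts = [artifact.path for artifact in client.list_artifacts(run.info.run_id)]
    print(run.info.run_id, artifacts)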

When a model needs a GPU for training but the pod is not allocated a GPU on Kubernetes, training will not run properly.
A GPU can be requested with the set_gpu_limit() method.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

If you run the script above and inspect sum-and-print-numbers in the generated file, you can see that {nvidia.com/gpu: 1} has been added under resources. This is how the component gets a GPU allocated.

  - name: sum-and-print-numbers
    container:
      args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
        '{{inputs.parameters.print-and-return-number-2-Output}}']
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def sum_and_print_numbers(number_1, number_2):
            print(number_1 + number_2)

        import argparse
        _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
        _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
        _parsed_args = vars(_parser.parse_args())

        _outputs = sum_and_print_numbers(**_parsed_args)
      image: python:3.7
      resources:
        limits: {nvidia.com/gpu: 1}
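
Besides the resource limit, many clusters also need the pod to be scheduled onto a node that actually has a GPU. In kfp v1 the task object additionally exposes add_node_selector_constraint (and add_toleration) for this; the label key and value below are assumptions that depend on how your GPU nodes are labeled (the example uses a GKE-style accelerator label).

from kfp.dsl import pipeline


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_task = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )
    sum_task.set_gpu_limit(1)
    # Hypothetical label key/value -- check how the GPU nodes in your cluster are labeled.
    sum_task.add_node_selector_constraint("cloud.google.com/gke-accelerator", "nvidia-tesla-t4")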

    CPU

The number of CPUs is set with the .set_cpu_limit() method.
Unlike the GPU limit, the value must be passed as a string rather than an int.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below.

      resources:
        limits: {nvidia.com/gpu: 1, cpu: '16'}

    Memory

Memory is set with the .set_memory_limit() method.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below.

      resources:
        limits: {nvidia.com/gpu: 1, memory: 1G}

In that case, they can be found in main-logs under Output artifacts.

    Visualizations

The Visualizations tab shows plots generated by a component.

To create a plot, save the values you want to display through an mlpipeline_ui_metadata: OutputPath("UI_Metadata") argument. The plot has to be provided in HTML format; the conversion looks like this:


from functools import partial

from kfp.components import create_component_from_func, OutputPath


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(
    mlpipeline_ui_metadata: OutputPath("UI_Metadata"),
):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)

Written as a pipeline, it becomes:

from functools import partial

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)


@pipeline(name="plot_pipeline")
def plot_pipeline():
    plot_linear()


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")

Running this script produces plot_pipeline.yaml, which looks like this:

    plot_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: plot-pipeline-
annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-17T13:31:32.963214',
pipelines.kubeflow.org/pipeline_spec: '{"name": "plot_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}
    spec:
    entrypoint: plot-pipeline
    templates:
    - name: plot-linear
    container:
    args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'matplotlib' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet
    --no-warn-script-location 'matplotlib' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path
    def plot_linear(mlpipeline_ui_metadata):
    import base64
    import json
    from io import BytesIO
    import matplotlib.pyplot as plt
    plt.plot([1, 2, 3], [1, 2, 3])
    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")
    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
    "outputs": [
    {
    "type": "web-app",
    "storage": "inline",
    "source": html,
    },
    ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
    json.dump(metadata, html_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Plot linear', description='')
    _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())
    _outputs = plot_linear(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.9
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''matplotlib'' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''matplotlib''
    --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef plot_linear(mlpipeline_ui_metadata):\n import
    base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot
    as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile,
    format=\"png\")\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\"utf-8\")\n\n html
    = f\"<img src=''data:image/png;base64,{encoded}''>\"\n metadata = {\n \"outputs\":
    [\n {\n \"type\": \"web-app\",\n \"storage\":
    \"inline\",\n \"source\": html,\n },\n ],\n }\n with
    open(mlpipeline_ui_metadata, \"w\") as html_writer:\n json.dump(metadata,
    html_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Plot
    linear'', description='''')\n_parser.add_argument(\"--mlpipeline-ui-metadata\",
    dest=\"mlpipeline_ui_metadata\", type=_make_parent_dirs_and_return_path,
    required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs
    = plot_linear(**_parsed_args)\n"], "image": "python:3.7"}}, "name": "Plot
    linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}',
    pipelines.kubeflow.org/component_ref: '{}'}
    - name: plot-pipeline
    dag:
    tasks:
    - {name: plot-linear, template: plot-linear}
    arguments:
    parameters: []
    serviceAccountName: pipeline-runner

After running the pipeline, click Visualizations.

    advanced-run-5.png
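
Besides the web-app type used above, the KFP v1 metadata viewer also understands a few other output types, such as markdown, which avoids the detour through a base64-encoded image when plain text is enough. A minimal sketch, assuming the markdown viewer type:

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def show_markdown(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import json

    metadata = {
        "outputs": [
            {
                "type": "markdown",
                "storage": "inline",
                "source": "# Hello Kubeflow\nThis text is rendered by the markdown viewer.",
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as f:
        json.dump(metadata, f)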

    Run output

    advanced-run-2.png

Run output collects and displays artifacts that follow the formats Kubeflow defines; in particular, it shows evaluation metrics.

To display metrics, save the names and values you want to show as JSON through an mlpipeline_metrics_path: OutputPath("Metrics") argument. For example, you can write it like this:

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json

    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)
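
Each entry in the metrics list can also carry an optional format field; as far as the KFP v1 metrics spec goes, "RAW" (the default) and "PERCENTAGE" are accepted. A minimal sketch that reports an accuracy value as a percentage:

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def show_accuracy(accuracy: float, mlpipeline_metrics_path: OutputPath("Metrics")):
    import json

    metrics = {
        "metrics": [
            {
                "name": "accuracy",
                "numberValue": accuracy,  # e.g. 0.92 is rendered as 92%
                "format": "PERCENTAGE",
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)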

Let's add the metric-producing component to the pipeline we created earlier and run it. The full pipeline is as follows.

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int) -> int:
    sum_number = number_1 + number_2
    print(sum_number)
    return sum_number


@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json

    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )
    show_metric_of_sum(sum_result.output)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

After the run finishes, click Run Output and you will see the following.

    advanced-run-4.png

    Config

    advanced-run-3.png

The Config tab shows all of the values that were passed in as the pipeline config.


For details, refer to the official Kubeflow documentation.
For example, a component that returns the quotient and remainder of dividing an input number by 2 has to be written as follows.

from typing import NamedTuple


def divide_and_return_number(
    number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
    from collections import namedtuple

    quotient, remainder = divmod(number, 2)
    print("quotient is", quotient)
    print("remainder is", remainder)

    divide_outputs = namedtuple(
        "DivideOutputs",
        [
            "quotient",
            "remainder",
        ],
    )
    return divide_outputs(quotient, remainder)

    Convert to Kubeflow Format

Now the component has to be converted into a format Kubeflow can use. This is done with kfp.components.create_component_from_func.
The converted component can then be imported as a regular Python function and used in a pipeline.

from kfp.components import create_component_from_func


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number
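
create_component_from_func also accepts a few keyword arguments; base_image and packages_to_install are the ones used throughout this tutorial when the default python:3.7 image is not enough. A minimal sketch (the image tag and package are just example values):

from kfp.components import create_component_from_func


def multiply_numbers(number_1: float, number_2: float) -> float:
    import numpy as np  # installed at runtime via packages_to_install

    return float(np.multiply(number_1, number_2))


multiply_numbers_op = create_component_from_func(
    multiply_numbers,
    base_image="python:3.8-slim",    # image the component container runs in
    packages_to_install=["numpy"],   # pip-installed before the function executes
)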

    Share component with yaml file

If a component cannot be shared as Python code, it can be shared as a YAML file instead. To do this, first convert the component to a YAML file, then load it in the pipeline with kfp.components.load_component_from_file.

First, here is how to convert the component you wrote into a YAML file.

from kfp.components import create_component_from_func


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


if __name__ == "__main__":
    print_and_return_number.component_spec.save("print_and_return_number.yaml")

Running this Python code creates a print_and_return_number.yaml file. Its contents are as follows.

name: Print and return number
inputs:
- {name: number, type: Integer}
outputs:
- {name: Output, type: Integer}
implementation:
  container:
    image: python:3.7
    command:
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def print_and_return_number(number):
          print(number)
          return number

      def _serialize_int(int_value: int) -> str:
          if isinstance(int_value, str):
              return int_value
          if not isinstance(int_value, int):
              raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
          return str(int_value)

      import argparse
      _parser = argparse.ArgumentParser(prog='Print and return number', description='')
      _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
      _parsed_args = vars(_parser.parse_args())
      _output_files = _parsed_args.pop("_output_paths", [])

      _outputs = print_and_return_number(**_parsed_args)

      _outputs = [_outputs]

      _output_serializers = [
          _serialize_int,

      ]

      import os
      for idx, output_file in enumerate(_output_files):
          try:
              os.makedirs(os.path.dirname(output_file))
          except OSError:
              pass
          with open(output_file, 'w') as f:
              f.write(_output_serializers[idx](_outputs[idx]))
    args:
    - --number
    - {inputValue: number}
    - '----output-paths'
    - {outputPath: Output}

The generated file can now be shared and used in a pipeline like this:

    from kfp.components import load_component_from_file

    print_and_return_number = load_component_from_file("print_and_return_number.yaml")
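
kfp.components also provides load_component_from_url and load_component_from_text, which are convenient when the YAML lives in a shared repository rather than on the local disk. A sketch (the URL is a placeholder, not a real location):

from kfp.components import load_component_from_text, load_component_from_url

# Hypothetical location of a shared component spec.
print_and_return_number = load_component_from_url(
    "https://raw.githubusercontent.com/your-org/components/main/print_and_return_number.yaml"
)

# Or pass the YAML content directly as a string.
with open("print_and_return_number.yaml") as f:
    print_and_return_number = load_component_from_text(f.read())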

    How Kubeflow executes component

Kubeflow executes a component in the following order:

1. docker pull <image>: pull the image that contains the component's execution environment
2. run command: execute the component contents inside the pulled image

Taking print_and_return_number.yaml as an example, the default image of @create_component_from_func is python:3.7, so the component contents are executed on top of that image:

    1. docker pull python:3.7
    2. print(number)


Version: 1.0

    6. Pipeline - Upload

    Upload Pipeline

Now let's upload the pipeline we built directly in Kubeflow.
Pipelines are uploaded through the Kubeflow dashboard UI. Port-forward the dashboard using the same method as in Install Kubeflow.

    kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open the dashboard at http://localhost:8080.

1. Select the Pipelines tab

    pipeline-gui-0.png

2. Select Upload Pipeline

    pipeline-gui-1.png

3. Select Choose file

    pipeline-gui-2.png

4. Upload the generated YAML file

    pipeline-gui-3.png

    5. Create

    pipeline-gui-4.png

    Upload Pipeline Version

An uploaded pipeline can be versioned by uploading it again. Note that this is not code-level version control like GitHub; it simply groups pipelines that share the same name. After uploading the pipeline in the example above, you can see that example_pipeline has been created as shown below.

    pipeline-gui-5.png

Clicking it brings up the following screen.

    pipeline-gui-4.png

Clicking Upload Version opens a screen where a new pipeline version can be uploaded, as shown below.

    pipeline-gui-6.png

Upload the pipeline.

    pipeline-gui-7.png

Once it is uploaded, the pipeline versions can be seen as follows.

    pipeline-gui-8.png
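
The same upload and versioning can also be done from Python with kfp.Client instead of the dashboard. A minimal sketch, assuming a port-forwarded endpoint (a multi-user installation may additionally require authentication):

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")  # assumed endpoint

# The first upload creates the pipeline ...
pipeline = client.upload_pipeline("example_pipeline.yaml", pipeline_name="example_pipeline")

# ... and later uploads of the same package become new versions of it.
client.upload_pipeline_version(
    "example_pipeline.yaml",
    pipeline_version_name="v2",
    pipeline_id=pipeline.id,
)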


If a component has multiple return values, they are stored in outputs; since outputs is a dict, you can access the value you want by key. As an example, consider the component written earlier that returns multiple values: divide_and_return_number returns quotient and remainder. Passing these two values to print_and_return_number looks like this:

def multi_pipeline(number: int):
    divided_result = divide_and_return_number(number)
    num_1_result = print_and_return_number(divided_result.outputs["quotient"])
    num_2_result = print_and_return_number(divided_result.outputs["remainder"])

The result of divide_and_return_number is stored in divided_result, and the two values can be retrieved with divided_result.outputs["quotient"] and divided_result.outputs["remainder"], respectively.

    Write to python code

Now, back to the main example: we pass these two results to sum_and_print_numbers.

def example_pipeline():
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

Next, collect the configs each component needs and define them as the pipeline config.

def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

    Convert to Kubeflow Format

Finally, convert it into the format Kubeflow can use. The conversion is done with the kfp.dsl.pipeline decorator.

from kfp.dsl import pipeline


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

Kubeflow can only run pipelines in YAML format, so the pipeline we created has to be compiled into that YAML format. Compilation can be done with the following code.

if __name__ == "__main__":
    import kfp

    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")
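
During development it can be convenient to skip the YAML file entirely and let the SDK compile and submit the pipeline in one call. A minimal sketch, assuming a reachable Kubeflow Pipelines endpoint (the host value is an example):

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")  # assumed endpoint
client.create_run_from_pipeline_func(
    example_pipeline,
    arguments={"number_1": 3, "number_2": 5},
)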

    Conclusion

Collecting everything described above into a single Python file gives the following.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

The compiled result looks like this:

    example_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: example-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
    "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
    spec:
    entrypoint: example-pipeline
    templates:
    - name: example-pipeline
    inputs:
    parameters:
    - {name: number_1}
    - {name: number_2}
    dag:
    tasks:
    - name: print-and-return-number
    template: print-and-return-number
    arguments:
    parameters:
    - {name: number_1, value: '{{inputs.parameters.number_1}}'}
    - name: print-and-return-number-2
    template: print-and-return-number-2
    arguments:
    parameters:
    - {name: number_2, value: '{{inputs.parameters.number_2}}'}
    - name: sum-and-print-numbers
    template: sum-and-print-numbers
    dependencies: [print-and-return-number, print-and-return-number-2]
    arguments:
    parameters:
    - {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
    - {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
    - name: print-and-return-number
    container:
    args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_1}
    outputs:
    parameters:
    - name: print-and-return-number-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_1}}"}'}
    - name: print-and-return-number-2
    container:
    args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_2}
    outputs:
    parameters:
    - name: print-and-return-number-2-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_2}}"}'}
    - name: sum-and-print-numbers
    container:
    args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
    '{{inputs.parameters.print-and-return-number-2-Output}}']
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def sum_and_print_numbers(number_1, number_2):
    print(number_1 + number_2)

    import argparse
    _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
    _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = sum_and_print_numbers(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: print-and-return-number-2-Output}
    - {name: print-and-return-number-Output}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
    "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
    argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
    description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
    type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
    dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
    {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
    pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
    "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
    arguments:
    parameters:
    - {name: number_1}
    - {name: number_2}
    serviceAccountName: pipeline-runner
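
The compiled file is an ordinary Argo Workflow manifest, so it can be inspected like any other YAML. A small sketch that lists the templates the compiler generated (one per component plus the DAG entrypoint), assuming pyyaml is installed:

import yaml

with open("example_pipeline.yaml") as f:
    workflow = yaml.safe_load(f)

print("entrypoint:", workflow["spec"]["entrypoint"])
for template in workflow["spec"]["templates"]:
    kind = "dag" if "dag" in template else "container"
    print(f"- {template['name']} ({kind})")
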
diff --git a/docs/kubeflow/basic-requirements/index.html b/docs/kubeflow/basic-requirements/index.html
Version: 1.0

3. Install Requirements

The recommended Python version for this hands-on tutorial is python>=3.7. If you are not familiar with Python environments, please refer to Appendix 1. Python Virtual Environments, set one up on the client node, and then proceed with the package installation.

The packages and versions required for the exercises are as follows.

• requirements.txt

  kfp==1.8.9
  scikit-learn==1.0.1
  mlflow==1.21.0
  pandas==1.3.4
  dill==0.3.4

Activate the Python virtual environment created earlier.

pyenv activate demo

Install the packages.

pip3 install -U pip
pip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4
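
To double-check that the pinned versions above actually ended up in the active environment, a quick sanity check from Python (a minimal sketch; it only prints the version of each package listed in requirements.txt) is:

# Print the installed version of each package from requirements.txt.
import kfp, sklearn, mlflow, pandas, dill

for module in (kfp, sklearn, mlflow, pandas, dill):
    print(module.__name__, module.__version__)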
diff --git a/docs/kubeflow/basic-run/index.html b/docs/kubeflow/basic-run/index.html
Version: 1.0

7. Pipeline - Run

Run Pipeline

Let's now run the pipeline we uploaded.

Before Run

1. Create Experiment

An Experiment is the unit Kubeflow uses to logically group the Runs it executes.

When you first enter a namespace in Kubeflow, no Experiment exists yet, so you need to create one before running a pipeline. If an Experiment already exists, you can skip ahead to Run Pipeline.

An Experiment can be created with the Create Experiment button.

run-0.png

2. Enter a Name

Enter the name to use for the Experiment. run-1.png

Run Pipeline

1. Select Create Run

run-2.png

2. Select an Experiment

run-9.png

run-10.png

3. Enter the Pipeline Config

Fill in the Config values defined when the pipeline was created. The uploaded pipeline requires number_1 and number_2.

run-3.png

4. Start

After entering the values, press the Start button and the pipeline starts running.

run-4.png

Run Result

Executed pipelines can be found on the Runs tab. Clicking a Run shows the details of that pipeline execution.

run-5.png

Clicking it brings up a screen like the following. Components that have not run yet are shown in gray.

run-6.png

Once a component finishes running, a green check mark appears.

run-7.png

Looking at the last component, you can see that it printed 8, the sum of the Config inputs 3 and 5.

run-8.png
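
The same flow (create an Experiment, then start a Run with number_1 and number_2) can also be driven from Python with the kfp SDK instead of the UI. This is a minimal sketch; the host URL, the experiment/run names, and the file name pipeline.yaml are assumptions that depend on your cluster setup, and multi-user clusters may require additional authentication.

import kfp

# Connect to the Kubeflow Pipelines API (adjust host/auth to your cluster).
client = kfp.Client(host="http://localhost:8888")

# Create (or reuse) an Experiment, then start a Run from the compiled pipeline file.
experiment = client.create_experiment(name="demo-experiment")
run = client.run_pipeline(
    experiment_id=experiment.id,
    job_name="sum-two-numbers",
    pipeline_package_path="pipeline.yaml",
    params={"number_1": 3, "number_2": 5},
)
print(run.id)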

diff --git a/docs/kubeflow/how-to-debug/index.html b/docs/kubeflow/how-to-debug/index.html

First, click the component and, on the Input/Output tab, download the data that went in as its inputs.
You can download each one by clicking the link in the area marked with a red box.

debug-5.png

Download both files into the same directory.
Then move to that directory and check the files.

ls

There are two files, as shown below.

drop-na-from-csv-output.tgz load-iris-data-target.tgz

Let's extract them.

tar -xzvf load-iris-data-target.tgz ; mv data target.csv
tar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv

Then run the component code against these files in a Jupyter notebook.

debug-3.png

Debugging shows that the dropna call should have dropped by column but dropped by row instead, so all of the data disappeared. Now that we know the cause, fix the component so that it drops along the column axis.

@partial(
    create_component_from_func,
    packages_to_install=["pandas"],
)
def drop_na_from_csv(
    data_path: InputPath("csv"),
    output_path: OutputPath("csv"),
):
    import pandas as pd

    data = pd.read_csv(data_path)
    data = data.dropna(axis="columns")
    data.to_csv(output_path, index=False)

After the fix, re-upload the pipeline and run it again; it now completes successfully, as shown below.

debug-6.png
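
To see why the original axis choice wiped out the data, here is a small standalone pandas example (illustrative only; the toy DataFrame is made up and is not the iris data used above):

import numpy as np
import pandas as pd

# One NaN in every row, but only in a single column.
df = pd.DataFrame({"a": [1.0, 2.0, 3.0], "b": [np.nan, np.nan, np.nan]})

print(df.dropna())                # drops by row (default axis): every row has a NaN -> empty frame
print(df.dropna(axis="columns"))  # drops by column: only column "b" is removed, the data in "a" survives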

diff --git a/docs/kubeflow/kubeflow-concepts/index.html b/docs/kubeflow/kubeflow-concepts/index.html

When the component wrapper wraps the component contents, it looks like this.

    concept-4.png

    Artifacts

As explained above, a component produces artifacts. An artifact is a collective term for anything produced as a file in any form, such as an evaluation result or a log. Among these, the meaningful ones we care about are the following.

concept-5.png

• Model
• Data
• Metric
• etc

Model

We defined a model as follows:

A model is the packaged form that includes the Python code, the trained weights, the network structure, and the environment needed to run them.

Data

Data includes things such as preprocessed features and the model's predictions.

Metric

We split metrics into two kinds: dynamic metrics and static metrics.

• A dynamic metric is a value that keeps changing at every epoch while training is in progress, such as the train loss.
• A static metric is a value that evaluates the final model after training is done, such as accuracy.

Pipeline

A pipeline consists of a set of components and a flow chart describing the order in which they run. The flow chart is a directed acyclic graph and can include simple conditional statements (a short example follows the figure below).

    concept-6.png
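
As an illustration of the "simple conditional" mentioned above, kfp v1 exposes dsl.Condition, which adds a branch to the DAG based on a component's output. This is a minimal sketch under the assumption that the kfp v1 SDK used elsewhere in this guide is installed; the component and pipeline names here are made up for illustration.

import kfp
from kfp import dsl
from kfp.components import create_component_from_func


def is_even(number: int) -> str:
    return "even" if number % 2 == 0 else "odd"


def announce(message: str):
    print(message)


is_even_op = create_component_from_func(is_even)
announce_op = create_component_from_func(announce)


@dsl.pipeline(name="condition-example")
def condition_pipeline(number: int):
    check = is_even_op(number)
    # Branch in the DAG: this component only runs when the upstream output equals "even".
    with dsl.Condition(check.output == "even"):
        announce_op("the input was even")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(condition_pipeline, "condition_pipeline.yaml")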

    Pipeline Config

We explained earlier that a component needs a Config to run. The collection of the Configs of the components that make up a pipeline is the pipeline Config.

concept-7.png

Run

A pipeline can only be executed once the pipeline Config it needs is provided.
In Kubeflow, an executed pipeline is called a Run.

concept-8.png

When a pipeline runs, each component produces its artifacts. Kubeflow Pipelines generates a unique ID for each Run and stores every artifact the Run produces.

concept-9.png

Now let's look at how to actually write components and pipelines ourselves.

diff --git a/docs/kubeflow/kubeflow-intro/index.html b/docs/kubeflow/kubeflow-intro/index.html
Version: 1.0

1. Kubeflow Introduction

To use Kubeflow, you need to write components and pipelines.

The approach described in MLOps for ALL differs somewhat from the one described on the official Kubeflow Pipelines homepage. This is because here we use Kubeflow Pipelines not as a workflow tool but as one of the components that make up the MLOps setup described earlier.

Now let's look at what components and pipelines are and how to write them.

diff --git a/docs/prerequisites/docker/advanced/index.html b/docs/prerequisites/docker/advanced/index.html

A situation like this, where you have come back out of the docker container, is called detached. Docker provides an option to start a container in detached mode at the same time as you execute run.

    Third Practice

    docker run -d ubuntu sleep 10

Because it runs in detached mode, you can keep using the terminal that issued the command for other actions.

It is worth using detached mode appropriately depending on the situation.
For example, when developing a backend API server that talks to a DB, you need to keep watching the backend API server's logs while changing its source code with hot reloading, but you usually do not need to watch the DB's logs. In that case you can run things as follows:
run the DB container in detached mode, and run the backend API server in attached mode while following its logs. That split is the efficient one.
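
The same attached/detached split can be scripted with the Docker Python SDK (docker-py). A minimal sketch, assuming the docker package is installed and the Docker daemon is running; the image names and the sleep command are placeholders, not the DB/API images from the example above.

import docker

client = docker.from_env()

# "DB"-style container: start it detached and ignore its logs.
db = client.containers.run("ubuntu:18.04", "sleep 3600", detach=True)
print("detached container id:", db.short_id)

# "API-server"-style container: run attached and read its output directly.
logs = client.containers.run("ubuntu:18.04", "echo hello from attached mode")
print(logs.decode())

db.remove(force=True)  # clean up the detached container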

    References

diff --git a/docs/prerequisites/docker/command/index.html b/docs/prerequisites/docker/command/index.html

But why should exited containers be removed at all?
A stopped container still keeps the data it used inside it, which is why it can be restarted with commands such as restart. However, that data keeps taking up disk space.

So, to get rid of containers you no longer use at all, you use the docker rm command.

First, check the current containers.

docker ps -a

There are three containers, as shown below.

    CONTAINER ID   IMAGE          COMMAND                  CREATED          STATUS                            PORTS     NAMES
    730391669c39 busybox "sh -c 'while true; …" 4 minutes ago Exited (137) About a minute ago demo3
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1

Let's delete the demo3 container with the command below.

docker rm demo3

Running docker ps -a again shows that the list is down to two.

    CONTAINER ID   IMAGE          COMMAND        CREATED          STATUS                       PORTS     NAMES
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1

Delete the remaining containers as well.

    docker rm demo2
    docker rm demo1

    10. Docker rmi

This is the command for deleting Docker images.

docker rmi --help

Use the command below to check which images are currently on the local machine.

docker images

The output looks like this.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    busybox latest a8440bba1bc0 32 hours ago 1.41MB
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

Let's delete the busybox image.

    docker rmi busybox

Running docker images again now shows the following.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB
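
For completeness, the same cleanup (docker rm / docker rmi) can be done from Python with the Docker SDK (docker-py). A minimal sketch under the assumption that the docker package is installed; the container and image names mirror the demo names above.

import docker

client = docker.from_env()

# docker rm: remove stopped containers by name (equivalent of `docker rm demo3 demo2 demo1`).
for name in ("demo3", "demo2", "demo1"):
    try:
        client.containers.get(name).remove()
    except docker.errors.NotFound:
        pass  # already gone

# docker rmi: remove an image by repository name (equivalent of `docker rmi busybox`).
client.images.remove("busybox")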

    References

diff --git a/docs/prerequisites/docker/images/index.html b/docs/prerequisites/docker/images/index.html

A similar instruction is ENTRYPOINT; the difference between the two is covered later.
It differs from the RUN instruction in that only one CMD can take effect per Docker image.

CMD <command>
CMD ["executable-command", "parameter1", "parameter2"]
CMD ["parameter1", "parameter2"] # when used together with ENTRYPOINT

# Example
CMD python main.py

WORKDIR

Specifies the directory inside the container in which the following instructions will be executed.
If the directory does not exist, it is created.

WORKDIR /path/to/workdir

# Example
WORKDIR /home/demo
RUN pwd # prints /home/demo

ENV

Sets the value of an environment variable that will keep being used inside the container.

ENV <KEY> <VALUE>
ENV <KEY>=<VALUE>

# Example
# set the default locale
RUN locale-gen ko_KR.UTF-8
ENV LANG ko_KR.UTF-8
ENV LANGUAGE ko_KR.UTF-8
ENV LC_ALL ko_KR.UTF-8

EXPOSE

Declares the port/protocol to open up on the container.
If <protocol> is not specified, TCP is used by default.

EXPOSE <port>
EXPOSE <port>/<protocol>

# Example
EXPOSE 8080

3. Writing a Simple Dockerfile

Open a Dockerfile with the editor you use, such as vim Dockerfile or vscode, and write it as follows.

# Use ubuntu 18.04 as the base image.
FROM ubuntu:18.04

# Run the apt-get update command.
RUN apt-get update

# Set the TEST env var to hello.
ENV TEST hello

# When the docker container starts, print the value of the environment variable TEST.
CMD echo $TEST

4. Docker build from Dockerfile

Build a Docker image from the Dockerfile with the docker build command.

docker build --help

Run the following command in the directory that contains the Dockerfile.

docker build -t my-image:v1.0.0 .

The command above breaks down as follows.

• . : from the Dockerfile in the current directory,
• -t : with the name my-image and the tag v1.0.0,
• build an image.

Let's check that the image was built successfully.

# grep: filter (grep) for lines containing my-image
docker images | grep my-image

If it worked, the output looks like this.

    my-image     v1.0.0    143114710b2d   3 seconds ago   87.9MB

5. Docker run from Dockerfile

Now let's run a docker container from the my-image:v1.0.0 image we just built.

docker run my-image:v1.0.0

If it runs successfully, the output is:

    hello

6. Docker run with env

This time, let's run a docker container from the same my-image:v1.0.0 image while overriding the value of the TEST env var at run time.

docker run -e TEST=bye my-image:v1.0.0

If it runs successfully, the output is:

    bye
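
The build-and-run steps above can also be driven from Python with the Docker SDK (docker-py), which can be handy when image builds are part of an ML workflow. A minimal sketch, assuming the docker package is installed and the Dockerfile above is in the current directory:

import docker

client = docker.from_env()

# Equivalent of `docker build -t my-image:v1.0.0 .`
image, build_logs = client.images.build(path=".", tag="my-image:v1.0.0")
print("built:", image.tags)

# Equivalent of `docker run my-image:v1.0.0` -> prints "hello"
print(client.containers.run("my-image:v1.0.0").decode().strip())

# Equivalent of `docker run -e TEST=bye my-image:v1.0.0` -> prints "bye"
print(client.containers.run("my-image:v1.0.0", environment={"TEST": "bye"}).decode().strip())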
diff --git a/docs/prerequisites/docker/index.html b/docs/prerequisites/docker/index.html
Version: 1.0

What is Docker?

Containers

• Container virtualization
  • A technology for running an application identically anywhere
• Container image
  • The set of all files needed to run an application
  • → the fish-shaped bread mold (the template)
• So what is a container?
  • A single process started from a container image
  • → a fish-shaped bread baked from that mold

Docker

Docker is a platform that lets you manage and use containers.
Its slogan is Build Once, Run Anywhere: it guarantees the same execution result everywhere.

Looking at how Docker works internally, the features that actually isolate resources for a container and control its lifecycle are provided by the Linux kernel (cgroups and so on). Using those interfaces directly is too hard, however, so the following abstraction layers were created.

docker-layer.png

Thanks to this, users can control containers easily with nothing more than the user-friendly Docker CLI.

Reading the layers

The roles of the layers shown above are as follows.

1. runC: uses Linux kernel features directly to isolate the namespaces, cpu, memory, filesystem, and so on that a container, as a single process, will use.
2. containerd: an abstraction layer for issuing commands to runC (the OCI layer), using the standardized interface (OCI).
3. dockerd: only plays the role of issuing commands to containerd.
4. docker cli: the user simply issues commands to dockerd (the Docker daemon) through the docker cli.
  • Because this communication goes through a unix socket, Docker errors often show messages such as /var/run/docker.sock is in use or permission denied.

So although Docker wraps many layers, when people say "Docker" they sometimes mean the Docker CLI, sometimes dockerd, and sometimes a single Docker container, which can cause confusion.
In the articles that follow, "Docker" may also be used with any of these meanings.

For ML Engineer

The reasons a machine learning engineer uses Docker are as follows.

1. My ML training/inference code should be independent of the OS, the Python version, the Python environment, and specific Python package versions.
2. Containerization is the technology that can bundle not just the code but every dependent package, environment variable, folder name, and so on that the code needs to run into a single package.
3. Docker is one of the pieces of software that makes this technology easy to use and manage, and that package is called a Docker image.
diff --git a/docs/prerequisites/docker/install/index.html b/docs/prerequisites/docker/install/index.html
Version: 1.0

Install Docker

Docker

To follow the Docker exercises, you need to install Docker.
How you install Docker depends on which OS you are using.
Please refer to the official homepage for the installation that matches your environment.

Verifying the installation

You need an OS and terminal environment in which docker run hello-world runs successfully.

OS        Docker Engine    Terminal
MacOS     Docker Desktop   zsh
Windows   Docker Desktop   Powershell
Windows   Docker Desktop   WSL2
Ubuntu    Docker Engine    bash

Before we begin..

Since we explain Docker usage as needed for MLOps, many of the analogies and examples may be weighted toward MLOps.

diff --git a/docs/prerequisites/docker/introduction/index.html b/docs/prerequisites/docker/introduction/index.html

As a result, every time the Docker version was updated the Docker Engine interface changed, and Kubernetes kept being heavily affected by it.

    Open Container Initiative

To ease this pain, Docker and several groups interested in container technology, including Google, came together and started the Open Container Initiative (OCI) project to define standards for containers.
Docker also split its interface one more time: it developed containerd, a Container Runtime that complies with the OCI standard, and added an abstraction layer so that dockerd calls containerd's API.

Following this trend, Kubernetes also stopped supporting only Docker: starting with version 1.5 it provides the Container Runtime Interface (CRI) spec, so that any container runtime that complies with the OCI standard and meets the spec can be used with Kubernetes.

CRI-O

A container runtime developed by Red Hat, Intel, SUSE, and IBM following the OCI standard and the CRI spec, aimed at being a Kubernetes-only Container Runtime.

Docker & Kubernetes today

Kubernetes had been using Docker Engine as its default container runtime, but Docker's API does not match the CRI spec (it does follow OCI), so Kubernetes itself developed and maintained dockershim, which translates Docker's API into CRI. (That it was maintained on the Kubernetes side rather than the Docker side was a significant burden.) dockershim was deprecated in Kubernetes v1.20, and support was dropped entirely from v1.23.

• v1.23 was released in December 2021

So from Kubernetes v1.23, Docker can no longer be used natively.
However, this change does not really affect users, because Docker images built with Docker Engine comply with the OCI standard and can therefore be used no matter which container runtime Kubernetes is built on.

    References

diff --git a/docs/setup-components/install-components-kf/index.html b/docs/setup-components/install-components-kf/index.html
Version: 1.0

1. Kubeflow

Preparing the installation files

To install Kubeflow v1.4.0, prepare the manifests files needed for the installation.

Clone the kubeflow/manifests repository at the v1.4.0 tag, then move into that folder.

git clone -b v1.4.0 https://github.com/kubeflow/manifests.git
cd manifests

Installing each component

The kubeflow/manifests repository lists the install command for each component, but it does not describe the issues that can come up during installation or how to confirm that a component was installed correctly, so people installing for the first time often struggle.
Therefore, for each component we also describe how to verify that it was installed correctly.

Also, to use resources efficiently, this document does not install Knative, KFServing, or the MPI Operator, which are components that MLOps for ALL does not cover.

Cert-manager

1. Install cert-manager.

  kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -

  If the installation succeeds, the output looks like this.

      namespace/cert-manager created
      customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
      customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
      customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
      customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
      customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
      customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
      serviceaccount/cert-manager created
      serviceaccount/cert-manager-cainjector created
      serviceaccount/cert-manager-webhook created
      role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
      role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
      role.rbac.authorization.k8s.io/cert-manager:leaderelection created
      clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
      clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
      clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
      clusterrole.rbac.authorization.k8s.io/cert-manager-view created
      clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
      rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
      rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
      rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
      clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
      service/cert-manager created
      service/cert-manager-webhook created
      deployment.apps/cert-manager created
      deployment.apps/cert-manager-cainjector created
      deployment.apps/cert-manager-webhook created
      mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
      validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created

  Wait until all 3 pods in the cert-manager namespace are Running.

  kubectl get pod -n cert-manager

  Once they are all Running, you will see output similar to this.

      NAME                                       READY   STATUS    RESTARTS   AGE
      cert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s
      cert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s
      cert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s
2. Install kubeflow-issuer.

  kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -

  If the installation succeeds, the output looks like this.

      clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created
• cert-manager-webhook issue

  Note that if the cert-manager-webhook deployment is not Running, an error similar to the one below may occur and kubeflow-issuer may fail to install.
  If that happens, confirm that all 3 cert-manager pods are Running and then run the command again.

      Error from server: error when retrieving current configuration of:
      Resource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"
      Name: "kubeflow-self-signing-issuer", Namespace: ""
      from server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused

    Istio

1. Install the Custom Resource Definitions (CRDs) for istio.

  kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -

  If the command succeeds, the output looks like this.

      customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created
      customresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created
      customresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created
      customresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created
      customresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created
      customresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created
2. Install the istio namespace.

  kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -

  If the command succeeds, the output looks like this.

      namespace/istio-system created
3. Install istio.

  kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -

  If the command succeeds, the output looks like this.

      serviceaccount/istio-ingressgateway-service-account created
      serviceaccount/istio-reader-service-account created
      serviceaccount/istiod-service-account created
      role.rbac.authorization.k8s.io/istio-ingressgateway-sds created
      role.rbac.authorization.k8s.io/istiod-istio-system created
      clusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created
      clusterrole.rbac.authorization.k8s.io/istiod-istio-system created
      rolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created
      rolebinding.rbac.authorization.k8s.io/istiod-istio-system created
      clusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created
      clusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created
      configmap/istio created
      configmap/istio-sidecar-injector created
      service/istio-ingressgateway created
      service/istiod created
      deployment.apps/istio-ingressgateway created
      deployment.apps/istiod created
      envoyfilter.networking.istio.io/metadata-exchange-1.8 created
      envoyfilter.networking.istio.io/metadata-exchange-1.9 created
      envoyfilter.networking.istio.io/stats-filter-1.8 created
      envoyfilter.networking.istio.io/stats-filter-1.9 created
      envoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created
      envoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created
      envoyfilter.networking.istio.io/tcp-stats-filter-1.8 created
      envoyfilter.networking.istio.io/tcp-stats-filter-1.9 created
      envoyfilter.networking.istio.io/x-forwarded-host created
      gateway.networking.istio.io/istio-ingressgateway created
      authorizationpolicy.security.istio.io/global-deny-all created
      authorizationpolicy.security.istio.io/istio-ingressgateway created
      mutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created
      validatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created

  Wait until both pods in the istio-system namespace are Running.

  kubectl get po -n istio-system

  Once they are all Running, you will see output similar to this.

      NAME                                   READY   STATUS    RESTARTS   AGE
      istio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s
      istiod-86457659bb-5h58w 1/1 Running 0 16s

    Dex

Install dex.

kustomize build common/dex/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like this.

    namespace/auth created
    customresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created
    serviceaccount/dex created
    clusterrole.rbac.authorization.k8s.io/dex created
    clusterrolebinding.rbac.authorization.k8s.io/dex created
    configmap/dex created
    secret/dex-oidc-client created
    service/dex created
    deployment.apps/dex created
    virtualservice.networking.istio.io/dex created

Wait until the single pod in the auth namespace is Running.

kubectl get po -n auth

Once it is Running, you will see output similar to this.

    NAME                   READY   STATUS    RESTARTS   AGE
    dex-5ddf47d88d-458cs 1/1 Running 1 12s

    OIDC AuthService

Install OIDC AuthService.

kustomize build common/oidc-authservice/base | kubectl apply -f -

If the command succeeds, the output looks like this.

    configmap/oidc-authservice-parameters created
    secret/oidc-authservice-client created
    service/authservice created
    persistentvolumeclaim/authservice-pvc created
    statefulset.apps/authservice created
    envoyfilter.networking.istio.io/authn-filter created

Wait until the authservice-0 pod in the istio-system namespace is Running.

kubectl get po -n istio-system -w

Once everything is Running, you will see output similar to this.

    NAME                                   READY   STATUS    RESTARTS   AGE
    authservice-0 1/1 Running 0 14s
    istio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s
    istiod-86457659bb-5h58w 1/1 Running 0 2m37s

    Kubeflow Namespace

Create the kubeflow namespace.

kustomize build common/kubeflow-namespace/base | kubectl apply -f -

If the command succeeds, the output looks like this.

    namespace/kubeflow created

Look up the kubeflow namespace.

kubectl get ns kubeflow

If it was created successfully, you will see output similar to this.

    NAME       STATUS   AGE
    kubeflow Active 8s

    Kubeflow Roles

Install kubeflow-roles.

kustomize build common/kubeflow-roles/base | kubectl apply -f -

If the command succeeds, the output looks like this.

    clusterrole.rbac.authorization.k8s.io/kubeflow-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created
    clusterrole.rbac.authorization.k8s.io/kubeflow-view created

Look up the kubeflow roles that were just created.

kubectl get clusterrole | grep kubeflow

A total of 6 clusterroles are listed, as shown below.

    kubeflow-admin                                                         2021-12-03T08:51:36Z
    kubeflow-edit 2021-12-03T08:51:36Z
    kubeflow-kubernetes-admin 2021-12-03T08:51:36Z
    kubeflow-kubernetes-edit 2021-12-03T08:51:36Z
    kubeflow-kubernetes-view 2021-12-03T08:51:36Z
    kubeflow-view 2021-12-03T08:51:36Z

    Kubeflow Istio Resources

Install kubeflow-istio-resources.

kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -

If the command succeeds, the output looks like this.

    clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created
    gateway.networking.istio.io/kubeflow-gateway created

Look up the kubeflow istio roles that were just created.

kubectl get clusterrole | grep kubeflow-istio

A total of 3 clusterroles are listed, as shown below.

    kubeflow-istio-admin                                                   2021-12-03T08:53:17Z
    kubeflow-istio-edit 2021-12-03T08:53:17Z
    kubeflow-istio-view 2021-12-03T08:53:17Z

Check that the gateway was installed correctly in the kubeflow namespace.

kubectl get gateway -n kubeflow

If it was created successfully, you will see output similar to this.

    NAME               AGE
    kubeflow-gateway 31s

    Kubeflow Pipelines

Install kubeflow pipelines.

kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -

If the command succeeds, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created
    customresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created
    customresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created
...(omitted)
    authorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created
    authorizationpolicy.security.istio.io/mysql created
    authorizationpolicy.security.istio.io/service-cache-server created

The command above installs many resources at once, but some of them have ordering dependencies.
Depending on timing, you may therefore see an error similar to the following.

    "error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1""  

If you see an error like this, wait about 10 seconds and then run the command again.

kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -

Check that the installation succeeded.

    kubectl get po -n kubeflow

Wait until all 16 pods are Running, as shown below.

    NAME                                                     READY   STATUS    RESTARTS   AGE
    cache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s
    cache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s
    kubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s
    metacontroller-0 1/1 Running 0 5m3s
    metadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s
    metadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s
    metadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s
    minio-5b65df66c9-k5gzg 2/2 Running 0 5m3s
    ml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s
    ml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s
    ml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s
    ml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s
    ml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s
    ml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s
    mysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s
    workflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s
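
Rather than re-running kubectl get po by hand, you can also poll the pod status from Python with the kubernetes client. A minimal sketch, assuming the kubernetes package is installed and your kubeconfig points at this cluster; it simply reports which pods in the kubeflow namespace are not Running yet.

from kubernetes import client, config

config.load_kube_config()  # use the same kubeconfig that kubectl is using
v1 = client.CoreV1Api()

pods = v1.list_namespaced_pod(namespace="kubeflow")
pending = [p.metadata.name for p in pods.items if p.status.phase != "Running"]
print("not Running yet:", pending or "none")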

In addition, check that the ml-pipeline UI can be reached.

kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80

Open a web browser and go to http://localhost:8888/#/pipelines/.

Confirm that a screen like the following appears.

    pipeline-ui

• localhost connection refused issue

    localhost-reject

If you get a "localhost refused to connect" error like the one shown, you can still reach the UI by setting the address option on the command.

If it is not a security concern, bind to all addresses with 0.0.0.0 as shown below and check again that the ml-pipeline UI can be reached.

    kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80
• If the connection is still refused even with the option above

Go to your firewall settings and allow access to all ports of the tcp protocol, or add a rule allowing access to port 8888, to grant access.

Open a web browser and go to http://<your virtual instance's public ip address>:8888/#/pipelines/; the ml-pipeline UI screen should appear.

When accessing the other ports used further below, you can do the same: run the command as above and add the port number to the firewall.

    Katib

Install Katib.

kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -

If the command succeeds, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created
    serviceaccount/katib-controller created
    serviceaccount/katib-ui created
    clusterrole.rbac.authorization.k8s.io/katib-controller created
    clusterrole.rbac.authorization.k8s.io/katib-ui created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created
    clusterrolebinding.rbac.authorization.k8s.io/katib-controller created
    clusterrolebinding.rbac.authorization.k8s.io/katib-ui created
    configmap/katib-config created
    configmap/trial-templates created
    secret/katib-mysql-secrets created
    service/katib-controller created
    service/katib-db-manager created
    service/katib-mysql created
    service/katib-ui created
    persistentvolumeclaim/katib-mysql created
    deployment.apps/katib-controller created
    deployment.apps/katib-db-manager created
    deployment.apps/katib-mysql created
    deployment.apps/katib-ui created
    certificate.cert-manager.io/katib-webhook-cert created
    issuer.cert-manager.io/katib-selfsigned-issuer created
    virtualservice.networking.istio.io/katib-ui created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created
    validatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep katib

Wait until all 4 pods are Running, as shown below.

    katib-controller-68c47fbf8b-b985z                        1/1     Running   0          82s
    katib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s
    katib-mysql-7894994f88-scs62 1/1 Running 0 82s
    katib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s

In addition, check that the katib UI can be reached.

kubectl port-forward svc/katib-ui -n kubeflow 8081:80

Open a web browser and go to http://localhost:8081/katib/.

Confirm that a screen like the following appears.

    katib-ui

    Central Dashboard

Install the Dashboard.

kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like this.

    serviceaccount/centraldashboard created
    role.rbac.authorization.k8s.io/centraldashboard created
    clusterrole.rbac.authorization.k8s.io/centraldashboard created
    rolebinding.rbac.authorization.k8s.io/centraldashboard created
    clusterrolebinding.rbac.authorization.k8s.io/centraldashboard created
    configmap/centraldashboard-config created
    configmap/centraldashboard-parameters created
    service/centraldashboard created
    deployment.apps/centraldashboard created
    virtualservice.networking.istio.io/centraldashboard created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep centraldashboard

Wait until the single centraldashboard pod in the kubeflow namespace is Running.

    centraldashboard-8fc7d8cc-xl7ts                          1/1     Running   0          52s

In addition, check that the Central Dashboard UI can be reached.

kubectl port-forward svc/centraldashboard -n kubeflow 8082:80

Open a web browser and go to http://localhost:8082/.

Confirm that a screen like the following appears.

    central-dashboard

    Admission Webhook

    kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -

If the command succeeds, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created
    serviceaccount/admission-webhook-service-account created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created
    clusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created
    service/admission-webhook-service created
    deployment.apps/admission-webhook-deployment created
    certificate.cert-manager.io/admission-webhook-cert created
    issuer.cert-manager.io/admission-webhook-selfsigned-issuer created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep admission-webhook

Wait until the single pod is Running.

    admission-webhook-deployment-667bd68d94-2hhrx            1/1     Running   0          11s

    Notebooks & Jupyter Web App

1. Install the Notebook controller.

  kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -

  If the command succeeds, the output looks like this.

      customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created
      serviceaccount/notebook-controller-service-account created
      role.rbac.authorization.k8s.io/notebook-controller-leader-election-role created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-role created
      rolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created
      configmap/notebook-controller-config-m44cmb547t created
      service/notebook-controller-service created
      deployment.apps/notebook-controller-deployment created

  Check that the installation succeeded.

  kubectl get po -n kubeflow | grep notebook-controller

  Wait until the single pod is Running.

      notebook-controller-deployment-75b4f7b578-w4d4l          1/1     Running   0          105s
2. Install the Jupyter Web App.

  kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -

  If the command succeeds, the output looks like this.

      serviceaccount/jupyter-web-app-service-account created
      role.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created
      rolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created
      clusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created
      configmap/jupyter-web-app-config-76844k4cd7 created
      configmap/jupyter-web-app-logos created
      configmap/jupyter-web-app-parameters-chmg88cm48 created
      service/jupyter-web-app-service created
      deployment.apps/jupyter-web-app-deployment created
      virtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created

  Check that the installation succeeded.

  kubectl get po -n kubeflow | grep jupyter-web-app

  Wait until the single pod is Running.

      jupyter-web-app-deployment-6f744fbc54-p27ts              1/1     Running   0          2m

    Profiles + KFAM

Install the Profile Controller.

kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -

If the command succeeds, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created
    serviceaccount/profiles-controller-service-account created
    role.rbac.authorization.k8s.io/profiles-leader-election-role created
    rolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created
    configmap/namespace-labels-data-48h7kd55mc created
    configmap/profiles-config-46c7tgh6fd created
    service/profiles-kfam created
    deployment.apps/profiles-deployment created
    virtualservice.networking.istio.io/profiles-kfam created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep profiles-deployment

Wait until the single pod is Running.

    profiles-deployment-89f7d88b-qsnrd                       2/2     Running   0          42s

    Volumes Web App

Install the Volumes Web App.

kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -

If the command succeeds, the output looks like this.

    serviceaccount/volumes-web-app-service-account created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created
    clusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created
    configmap/volumes-web-app-parameters-4gg8cm2gmk created
    service/volumes-web-app-service created
    deployment.apps/volumes-web-app-deployment created
    virtualservice.networking.istio.io/volumes-web-app-volumes-web-app created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep volumes-web-app

Wait until the single pod is Running.

    volumes-web-app-deployment-8589d664cc-62svl              1/1     Running   0          27s

    Tensorboard & Tensorboard Web App

1. Install the Tensorboard Web App.

  kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -

  If the command succeeds, the output looks like this.

      serviceaccount/tensorboards-web-app-service-account created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created
      configmap/tensorboards-web-app-parameters-g28fbd6cch created
      service/tensorboards-web-app-service created
      deployment.apps/tensorboards-web-app-deployment created
      virtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created

  Check that the installation succeeded.

  kubectl get po -n kubeflow | grep tensorboards-web-app

  Wait until the single pod is Running.

      tensorboards-web-app-deployment-6ff79b7f44-qbzmw            1/1     Running             0          22s
2. Install the Tensorboard Controller.

  kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -

  If the command succeeds, the output looks like this.

      customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created
      serviceaccount/tensorboard-controller created
      role.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created
      clusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created
      clusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created
      rolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created
      configmap/tensorboard-controller-config-bf88mm96c8 created
      service/tensorboard-controller-controller-manager-metrics-service created
      deployment.apps/tensorboard-controller-controller-manager created

  Check that the installation succeeded.

  kubectl get po -n kubeflow | grep tensorboard-controller

  Wait until the single pod is Running.

      tensorboard-controller-controller-manager-954b7c544-vjpzj   3/3     Running   1          73s

    Training Operator

Install the Training Operator.

kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -

If the command succeeds, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created
    serviceaccount/training-operator created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-view created
    clusterrole.rbac.authorization.k8s.io/training-operator created
    clusterrolebinding.rbac.authorization.k8s.io/training-operator created
    service/training-operator created
    deployment.apps/training-operator created

Check that the installation succeeded.

kubectl get po -n kubeflow | grep training-operator

Wait until the single pod is Running.

    training-operator-7d98f9dd88-6887f                          1/1     Running   0          28s

    User Namespace

To use Kubeflow, create the Kubeflow Profile for the user you will use.

kustomize build common/user-namespace/base | kubectl apply -f -

If the command succeeds, the output looks like this.

    configmap/default-install-config-9h2h2b6hbk created
    profile.kubeflow.org/kubeflow-user-example-com created

Confirm that the kubeflow-user-example-com profile was created.

    kubectl get profile
    kubeflow-user-example-com   37s
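
If you prefer checking from Python rather than kubectl, the Profile objects can also be listed with the kubernetes Python client. A minimal sketch, assuming the kubernetes package is installed and your kubeconfig points at the cluster; the group/version/plural values are assumed to match the profiles.kubeflow.org CRD installed above.

from kubernetes import client, config

config.load_kube_config()
api = client.CustomObjectsApi()

# Profiles are cluster-scoped custom resources.
profiles = api.list_cluster_custom_object(group="kubeflow.org", version="v1", plural="profiles")
for item in profiles["items"]:
    print(item["metadata"]["name"])  # expect: kubeflow-user-example-com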

Verifying the installation

Port-forward so that you can reach the Kubeflow central dashboard from a web browser.

kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open a web browser, go to http://localhost:8080, and confirm that a screen like the following appears.

    login-ui

Log in with the following credentials.

    • Email Address: user@example.com
    • Password: 12341234

    central-dashboard

    - +따라서 때에 따라 다음과 비슷한 에러가 발생할 수 있습니다.

    "error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1""  

    위와 비슷한 에러가 발생한다면, 10 초 정도 기다린 뒤 다시 위의 명령을 수행합니다.

    kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -

    정상적으로 설치되었는지 확인합니다.

    kubectl get po -n kubeflow

    다음과 같이 총 16개의 pod 가 모두 Running 이 될 때까지 기다립니다.

    NAME                                                     READY   STATUS    RESTARTS   AGE
    cache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s
    cache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s
    kubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s
    metacontroller-0 1/1 Running 0 5m3s
    metadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s
    metadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s
    metadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s
    minio-5b65df66c9-k5gzg 2/2 Running 0 5m3s
    ml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s
    ml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s
    ml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s
    ml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s
    ml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s
    ml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s
    mysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s
    workflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s

    추가로 ml-pipeline UI가 정상적으로 접속되는지 확인합니다.

    kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80

    웹 브라우저를 열어 http://localhost:8888/#/pipelines/ 경로에 접속합니다.

    다음과 같은 화면이 출력되는 것을 확인합니다.

    pipeline-ui

    • localhost 연결 거부 이슈

    localhost-reject

    만약 다음과 같이 localhost에서 연결을 거부했습니다 라는 에러가 출력될 경우, 커맨드로 address 설정을 통해 접근하는 것이 가능합니다.

    보안상의 문제가 되지 않는다면, 아래와 같이 0.0.0.0 로 모든 주소의 bind를 열어주는 방향으로 ml-pipeline UI가 정상적으로 접속되는지 확인합니다.

    kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80
    • 위의 옵션으로 실행했음에도 여전히 연결 거부 이슈가 발생할 경우

    방화벽 설정으로 접속해 모든 tcp 프로토콜의 포트에 대한 접속을 허가 또는 8888번 포트의 접속 허가를 추가해 접근 권한을 허가해줍니다.

    웹 브라우저를 열어 http://<당신의 가상 인스턴스 공인 ip 주소>:8888/#/pipelines/ 경로에 접속하면, ml-pipeline UI 화면이 출력되는 것을 확인할 수 있습니다.

    하단에서 진행되는 다른 포트의 경로에 접속할 때도 위의 절차와 동일하게 커맨드를 실행하고, 방화벽에 포트 번호를 추가해주면 실행하는 것이 가능합니다.

    Katib

    Katib 를 설치합니다.

    kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -

    정상적으로 수행되면 다음과 같이 출력됩니다.

    customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created
    serviceaccount/katib-controller created
    serviceaccount/katib-ui created
    clusterrole.rbac.authorization.k8s.io/katib-controller created
    clusterrole.rbac.authorization.k8s.io/katib-ui created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created
    clusterrolebinding.rbac.authorization.k8s.io/katib-controller created
    clusterrolebinding.rbac.authorization.k8s.io/katib-ui created
    configmap/katib-config created
    configmap/trial-templates created
    secret/katib-mysql-secrets created
    service/katib-controller created
    service/katib-db-manager created
    service/katib-mysql created
    service/katib-ui created
    persistentvolumeclaim/katib-mysql created
    deployment.apps/katib-controller created
    deployment.apps/katib-db-manager created
    deployment.apps/katib-mysql created
    deployment.apps/katib-ui created
    certificate.cert-manager.io/katib-webhook-cert created
    issuer.cert-manager.io/katib-selfsigned-issuer created
    virtualservice.networking.istio.io/katib-ui created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created
    validatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created

    정상적으로 설치되었는지 확인합니다.

    kubectl get po -n kubeflow | grep katib

    다음과 같이 총 4 개의 pod 가 Running 이 될 때까지 기다립니다.

    katib-controller-68c47fbf8b-b985z                        1/1     Running   0          82s
    katib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s
    katib-mysql-7894994f88-scs62 1/1 Running 0 82s
    katib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s

    추가로 katib UI가 정상적으로 접속되는지 확인합니다.

    kubectl port-forward svc/katib-ui -n kubeflow 8081:80

    웹 브라우저를 열어 http://localhost:8081/katib/ 경로에 접속합니다.

    다음과 같은 화면이 출력되는 것을 확인합니다.

    katib-ui

    Central Dashboard

Install the Central Dashboard.

kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -

If it runs successfully, the output looks like this.

    serviceaccount/centraldashboard created
    role.rbac.authorization.k8s.io/centraldashboard created
    clusterrole.rbac.authorization.k8s.io/centraldashboard created
    rolebinding.rbac.authorization.k8s.io/centraldashboard created
    clusterrolebinding.rbac.authorization.k8s.io/centraldashboard created
    configmap/centraldashboard-config created
    configmap/centraldashboard-parameters created
    service/centraldashboard created
    deployment.apps/centraldashboard created
    virtualservice.networking.istio.io/centraldashboard created

Check that it was installed correctly.

kubectl get po -n kubeflow | grep centraldashboard

Wait until the 1 centraldashboard-related pod in the kubeflow namespace is Running.

    centraldashboard-8fc7d8cc-xl7ts                          1/1     Running   0          52s

Additionally, check that the Central Dashboard UI is reachable.

kubectl port-forward svc/centraldashboard -n kubeflow 8082:80

Open a web browser and go to http://localhost:8082/.

Confirm that a screen like the following appears.

    central-dashboard

    Admission Webhook

    kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -

If it runs successfully, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created
    serviceaccount/admission-webhook-service-account created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created
    clusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created
    clusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created
    service/admission-webhook-service created
    deployment.apps/admission-webhook-deployment created
    certificate.cert-manager.io/admission-webhook-cert created
    issuer.cert-manager.io/admission-webhook-selfsigned-issuer created
    mutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created

Check that it was installed correctly.

kubectl get po -n kubeflow | grep admission-webhook

Wait until 1 pod is Running.

    admission-webhook-deployment-667bd68d94-2hhrx            1/1     Running   0          11s

    Notebooks & Jupyter Web App

1. Install the Notebook Controller.

  kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -

  If it runs successfully, the output looks like this.

      customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created
      serviceaccount/notebook-controller-service-account created
      role.rbac.authorization.k8s.io/notebook-controller-leader-election-role created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created
      clusterrole.rbac.authorization.k8s.io/notebook-controller-role created
      rolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created
      configmap/notebook-controller-config-m44cmb547t created
      service/notebook-controller-service created
      deployment.apps/notebook-controller-deployment created

  Check that it was installed correctly.

  kubectl get po -n kubeflow | grep notebook-controller

  Wait until 1 pod is Running.

      notebook-controller-deployment-75b4f7b578-w4d4l          1/1     Running   0          105s
2. Install the Jupyter Web App.

  kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -

  If it runs successfully, the output looks like this.

      serviceaccount/jupyter-web-app-service-account created
      role.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-cluster-role created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-admin created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-edit created
      clusterrole.rbac.authorization.k8s.io/jupyter-web-app-kubeflow-notebook-ui-view created
      rolebinding.rbac.authorization.k8s.io/jupyter-web-app-jupyter-notebook-role-binding created
      clusterrolebinding.rbac.authorization.k8s.io/jupyter-web-app-cluster-role-binding created
      configmap/jupyter-web-app-config-76844k4cd7 created
      configmap/jupyter-web-app-logos created
      configmap/jupyter-web-app-parameters-chmg88cm48 created
      service/jupyter-web-app-service created
      deployment.apps/jupyter-web-app-deployment created
      virtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created

  Check that it was installed correctly.

  kubectl get po -n kubeflow | grep jupyter-web-app

  Wait until 1 pod is Running.

      jupyter-web-app-deployment-6f744fbc54-p27ts              1/1     Running   0          2m

    Profiles + KFAM

Install the Profile Controller.

kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -

If it runs successfully, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created
    serviceaccount/profiles-controller-service-account created
    role.rbac.authorization.k8s.io/profiles-leader-election-role created
    rolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created
    configmap/namespace-labels-data-48h7kd55mc created
    configmap/profiles-config-46c7tgh6fd created
    service/profiles-kfam created
    deployment.apps/profiles-deployment created
    virtualservice.networking.istio.io/profiles-kfam created

Check that it was installed correctly.

kubectl get po -n kubeflow | grep profiles-deployment

Wait until 1 pod is Running.

    profiles-deployment-89f7d88b-qsnrd                       2/2     Running   0          42s

    Volumes Web App

Install the Volumes Web App.

kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -

If it runs successfully, the output looks like this.

    serviceaccount/volumes-web-app-service-account created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created
    clusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created
    configmap/volumes-web-app-parameters-4gg8cm2gmk created
    service/volumes-web-app-service created
    deployment.apps/volumes-web-app-deployment created
    virtualservice.networking.istio.io/volumes-web-app-volumes-web-app created

Check that it was installed correctly.

kubectl get po -n kubeflow | grep volumes-web-app

Wait until 1 pod is Running.

    volumes-web-app-deployment-8589d664cc-62svl              1/1     Running   0          27s

    Tensorboard & Tensorboard Web App

1. Install the Tensorboard Web App.

  kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -

  If it runs successfully, the output looks like this.

      serviceaccount/tensorboards-web-app-service-account created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created
      clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created
      configmap/tensorboards-web-app-parameters-g28fbd6cch created
      service/tensorboards-web-app-service created
      deployment.apps/tensorboards-web-app-deployment created
      virtualservice.networking.istio.io/tensorboards-web-app-tensorboards-web-app created

  Check that it was installed correctly.

  kubectl get po -n kubeflow | grep tensorboards-web-app

  Wait until 1 pod is Running.

      tensorboards-web-app-deployment-6ff79b7f44-qbzmw            1/1     Running             0          22s
2. Install the Tensorboard Controller.

      kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -

  If it runs successfully, the output looks like this.

      customresourcedefinition.apiextensions.k8s.io/tensorboards.tensorboard.kubeflow.org created
      serviceaccount/tensorboard-controller created
      role.rbac.authorization.k8s.io/tensorboard-controller-leader-election-role created
      clusterrole.rbac.authorization.k8s.io/tensorboard-controller-manager-role created
      clusterrole.rbac.authorization.k8s.io/tensorboard-controller-proxy-role created
      rolebinding.rbac.authorization.k8s.io/tensorboard-controller-leader-election-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-manager-rolebinding created
      clusterrolebinding.rbac.authorization.k8s.io/tensorboard-controller-proxy-rolebinding created
      configmap/tensorboard-controller-config-bf88mm96c8 created
      service/tensorboard-controller-controller-manager-metrics-service created
      deployment.apps/tensorboard-controller-controller-manager created

  Check that it was installed correctly.

  kubectl get po -n kubeflow | grep tensorboard-controller

  Wait until 1 pod is Running.

      tensorboard-controller-controller-manager-954b7c544-vjpzj   3/3     Running   1          73s

    Training Operator

Install the Training Operator.

kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -

If it runs successfully, the output looks like this.

    customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created
    serviceaccount/training-operator created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-view created
    clusterrole.rbac.authorization.k8s.io/training-operator created
    clusterrolebinding.rbac.authorization.k8s.io/training-operator created
    service/training-operator created
    deployment.apps/training-operator created

Check that it was installed correctly.

kubectl get po -n kubeflow | grep training-operator

Wait until 1 pod is Running.

    training-operator-7d98f9dd88-6887f                          1/1     Running   0          28s

    User Namespace

To use Kubeflow, create a Kubeflow Profile for the user who will be using it.

kustomize build common/user-namespace/base | kubectl apply -f -

If it runs successfully, the output looks like this.

    configmap/default-install-config-9h2h2b6hbk created
    profile.kubeflow.org/kubeflow-user-example-com created

Check that the kubeflow-user-example-com profile has been created.

    kubectl get profile
    kubeflow-user-example-com   37s
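Profiles are how Kubeflow isolates users: each Profile owns a namespace of the same name. If you later want additional users, you can apply further Profile objects yourself; the snippet below is only a sketch with a hypothetical user name and email address.

# Example only: create an extra Profile for a hypothetical second user.
cat <<EOF | kubectl apply -f -
apiVersion: kubeflow.org/v1
kind: Profile
metadata:
  name: second-user            # also becomes the user's namespace name
spec:
  owner:
    kind: User
    name: second-user@example.com
EOF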

Verify the Installation

Port-forward so that you can reach the Kubeflow Central Dashboard from a web browser.

kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open a web browser, go to http://localhost:8080, and confirm that a screen like the following appears.

    login-ui

Log in with the following credentials.

    • Email Address: user@example.com
    • Password: 12341234

    central-dashboard

So, we will deploy an MLflow Tracking Server, which stores the data managed by MLflow and serves its UI, to the Kubernetes cluster.

    Before Install MLflow Tracking Server

Install a PostgreSQL DB

Deploy a PostgreSQL DB to the Kubernetes cluster for the MLflow Tracking Server to use as its backend store.

First, create a namespace called mlflow-system.

kubectl create ns mlflow-system

If the following message is printed, it was created successfully.

namespace/mlflow-system created

Create the PostgreSQL DB in the mlflow-system namespace.

    kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml 

If it runs successfully, the output looks like this.

    service/postgresql-mlflow-service created
    deployment.apps/postgresql-mlflow created
    persistentvolumeclaim/postgresql-mlflow-pvc created

Wait until the 1 postgresql-related pod in the mlflow-system namespace is Running.

kubectl get pod -n mlflow-system | grep postgresql

If the output looks similar to the following, it is running correctly.

    postgresql-mlflow-7b9bc8c79f-srkh7   1/1     Running   0          38s

Configure Minio

For the artifact store of the MLflow Tracking Server, we reuse the Minio that was installed earlier during the Kubeflow installation.
However, to keep Kubeflow usage and MLflow usage separate, we will create a dedicated mlflow bucket.
To create the bucket, first port-forward minio-service so that you can reach Minio.

    kubectl port-forward svc/minio-service -n kubeflow 9000:9000

Open a web browser and go to localhost:9000; a screen like the following appears.

    minio-install

Log in with the following credentials.

    • Username: minio
    • Password: minio123

Click the + button at the bottom right, then click Create Bucket.

create-bucket

Enter mlflow as the Bucket Name and create the bucket.

Once it is created, a bucket named mlflow appears on the left, as shown below.

    mlflow-bucket
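If you prefer the command line over the web UI, the same bucket can be created with the MinIO client; this is only a sketch, and it assumes mc is already installed on the client node and that the port-forward above is still running.

# Example only: create the mlflow bucket with the MinIO client (mc).
mc alias set kf-minio http://localhost:9000 minio minio123
mc mb kf-minio/mlflow
mc ls kf-minio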


    Let's Install MLflow Tracking Server

Add the Helm Repository

helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts

If the following message is printed, it was added successfully.

    "mlops-for-all" has been added to your repositories

Update the Helm Repository

helm repo update

If the following message is printed, it was updated successfully.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "mlops-for-all" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

Install version 0.2.0 of the mlflow-server Helm chart.

    helm install mlflow-server mlops-for-all/mlflow-server \
    --namespace mlflow-system \
    --version 0.2.0
• Note: By default, the Helm chart above is installed with the MLflow backend store and artifact store pointing at the Minio created during the Kubeflow installation and at the PostgreSQL created in the PostgreSQL DB installation step above.
  • If you want to use a separately created DB or object storage instead, refer to the Helm Chart Repo and set the values yourself when running helm install (a sketch follows below).
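The exact value keys are defined by the mlops-for-all/mlflow-server chart, so check the chart repository for their names; the commands below are only a sketch of the usual Helm workflow for overriding defaults with your own values file.

# Example only: dump the chart's default values, edit them, then install with the edited file.
helm show values mlops-for-all/mlflow-server --version 0.2.0 > my-values.yaml
# edit my-values.yaml to point at your own DB / object storage, then:
helm install mlflow-server mlops-for-all/mlflow-server \
  --namespace mlflow-system \
  --version 0.2.0 \
  -f my-values.yaml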

You should see a message like the following.

    NAME: mlflow-server
    LAST DEPLOYED: Sat Dec 18 22:02:13 2021
    NAMESPACE: mlflow-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

Check that it was installed correctly.

kubectl get pod -n mlflow-system | grep mlflow-server

Wait until the 1 mlflow-server-related pod in the mlflow-system namespace is Running.
If the output looks similar to the following, it is running correctly.

    mlflow-server-ffd66d858-6hm62        1/1     Running   0          74s

Verify the Installation

Now let's check that the MLflow server is reachable.

First, port-forward so that you can connect from the client node.

kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000; a screen like the following appears.

    mlflow-install

Version: 1.0

4. Prometheus & Grafana

Prometheus & Grafana

Prometheus and Grafana are tools for monitoring.
To run a service reliably, you need to continuously observe the state of the service and of the infrastructure it runs on, and respond quickly to problems based on the observed metrics.
Among the many tools for doing such monitoring efficiently, MLOps for ALL uses the open-source Prometheus and Grafana.

For more details, please see the official Prometheus documentation and the official Grafana documentation.

Prometheus is a tool that collects metrics from a variety of targets, and Grafana is a tool that helps visualize the collected data. They have no dependency on each other, but they are complementary and are therefore often used together.

In this page, we install Prometheus and Grafana on the Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check that metrics are collected correctly.

This guide uses version 1.12.0 of the seldonio/seldon-core-analytics Helm chart to install Prometheus and Grafana on the Kubernetes cluster, together with a dashboard for conveniently inspecting the metrics of SeldonDeployments created by Seldon-Core.

Add the Helm Repository

helm repo add seldonio https://storage.googleapis.com/seldon-charts

If the following message is printed, it was added successfully.

"seldonio" has been added to your repositories

Update the Helm Repository

helm repo update

If the following message is printed, it was updated successfully.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "seldonio" chart repository
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

Install version 1.12.0 of the seldon-core-analytics Helm chart.

    helm install seldon-core-analytics seldonio/seldon-core-analytics \
    --namespace seldon-system \
    --version 1.12.0

You should see a message like the following.

...(omitted)
    NAME: seldon-core-analytics
    LAST DEPLOYED: Tue Dec 14 18:29:38 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1

Check that it was installed correctly.

kubectl get pod -n seldon-system | grep seldon-core-analytics

Wait until all 6 seldon-core-analytics-related pods in the seldon-system namespace are Running.

    seldon-core-analytics-grafana-657c956c88-ng8wn                  2/2     Running   0          114s
    seldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s
    seldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s
    seldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s
    seldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s
    seldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s

Verify the Installation

Now let's check that Grafana is reachable.

First, port-forward so that you can connect from the client node.

kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

Open a web browser and go to localhost:8090; a screen like the following appears.

    grafana-install

Log in with the following credentials.

    • Email or username : admin
    • Password : password

After logging in, a screen like the following appears.

    grafana-login

Click the dashboards icon on the left, then click the Manage button.

    dashboard-click

You can see that a few default Grafana dashboards are included. Click the Prediction Analytics dashboard among them.

    dashboard

The Seldon Core API Dashboard appears, and you can see output like the following.

    seldon-dashboard
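The dashboard only has data to show once requests have actually been sent to a SeldonDeployment. As a sketch (the deployment name "sklearn" and namespace "seldon" are placeholders for whatever SeldonDeployment you created, and the Ambassador service is assumed to listen on port 80 as installed above), a prediction request routed through Ambassador looks like this; each such request shows up in the dashboard's metrics.

# Example only: send a prediction request to a SeldonDeployment through Ambassador.
kubectl port-forward svc/ambassador -n seldon-system 8003:80
curl -X POST http://localhost:8003/seldon/seldon/sklearn/api/v1.0/predictions \
  -H 'Content-Type: application/json' \
  -d '{"data": {"ndarray": [[1.0, 2.0, 3.0, 4.0]]}}'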

    References

Version: 1.0

3. Seldon-Core

Seldon-Core

Seldon-Core is one of the open-source frameworks for deploying and managing large numbers of machine learning models in a Kubernetes environment.
For more details, please see Seldon-Core's official product page and GitHub repository, as well as the API Deployment part of this guide.

Install Seldon-Core

To use Seldon-Core, you need a module such as Ambassador or Istio to handle ingress in Kubernetes.
Seldon-Core officially supports only Ambassador and Istio; MLOps for ALL uses Seldon-Core with Ambassador, so we will install Ambassador.

Ambassador - Add the Helm Repository

helm repo add datawire https://www.getambassador.io

If the following message is printed, it was added successfully.

"datawire" has been added to your repositories

Ambassador - Update the Helm Repository

helm repo update

If the following message is printed, it was updated successfully.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Ambassador - Helm Install

Install version 6.9.3 of the ambassador chart.

    helm install ambassador datawire/ambassador \
    --namespace seldon-system \
    --create-namespace \
    --set image.repository=quay.io/datawire/ambassador \
    --set enableAES=false \
    --set crds.keep=false \
    --version 6.9.3

You should see a message like the following.

...(omitted)

    W1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
    W1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding
    NAME: ambassador
    LAST DEPLOYED: Mon Dec 6 17:01:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    NOTES:
    -------------------------------------------------------------------------------
    Congratulations! You've successfully installed Ambassador!

    -------------------------------------------------------------------------------
    To get the IP address of Ambassador, run the following commands:
    NOTE: It may take a few minutes for the LoadBalancer IP to be available.
    You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'

    On GKE/Azure:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')

    On AWS:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')

    echo http://$SERVICE_IP:

    For help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.

Wait until 4 pods in seldon-system are Running.

    kubectl get pod -n seldon-system
    ambassador-7f596c8b57-4s9xh                  1/1     Running   0          7m15s
    ambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s
    ambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s
    ambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s

    Seldon-Core - Helm Install

Install version 1.11.2 of the seldon-core-operator chart.

    helm install seldon-core seldon-core-operator \
    --repo https://storage.googleapis.com/seldon-charts \
    --namespace seldon-system \
    --set usageMetrics.enabled=true \
    --set ambassador.enabled=true \
    --version 1.11.2

You should see a message like the following.

...(omitted)

    W1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration
    NAME: seldon-core
    LAST DEPLOYED: Mon Dec 6 17:05:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

Wait until the 1 seldon-controller-manager pod in the seldon-system namespace is Running.

    kubectl get pod -n seldon-system | grep seldon-controller
    seldon-controller-manager-8457b8b5c7-r2frm   1/1     Running   0          2m22s

    References

Version: 1.0

5. Install Kubernetes Modules

Setup Kubernetes Modules

This page describes how to install, from the client node, the modules that the cluster will use.
All of the steps that follow are performed on the client node.

    Helm

Helm is one of the package-managing tools that let you deploy and manage Kubernetes packages and their related resources all at once.

1. Download Helm v3.7.1 into the current folder.
• For Linux amd64

      wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz
• For other operating systems, refer to the official website to find the download path of the binary that matches the client node's OS and CPU.

2. Extract the archive and move the file so that helm can be used.

      tar -zxvf helm-v3.7.1-linux-amd64.tar.gz
      sudo mv linux-amd64/helm /usr/local/bin/helm
3. Check that it was installed correctly.

      helm help

  If you see a message like the following, it is installed correctly.

      The Kubernetes package manager

      Common actions for Helm:
    • helm search: search for charts

    • helm pull: download a chart to your local directory to view

    • helm install: upload the chart to Kubernetes

    • helm list: list releases of charts

      Environment variables:

  Name                 Description
  $HELM_CACHE_HOME     set an alternative location for storing cached files.
  $HELM_CONFIG_HOME    set an alternative location for storing Helm configuration.
  $HELM_DATA_HOME      set an alternative location for storing Helm data.

      ...


    Kustomize

Kustomize is likewise one of the package-managing tools that let you deploy and manage multiple Kubernetes resources at once.

1. Download the kustomize v3.10.0 binary into the current folder.
• For Linux amd64

      wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz
• For other operating systems, check the kustomize/v3.10.0 release and download the appropriate binary.

2. Extract the archive and move the file so that kustomize can be used.

      tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz
      sudo mv kustomize /usr/local/bin/kustomize
3. Check that it was installed correctly.

      kustomize help

  If you see a message like the following, it is installed correctly.

      Manages declarative configuration of Kubernetes.
      See https://sigs.k8s.io/kustomize

      Usage:
      kustomize [command]

      Available Commands:
      build Print configuration per contents of kustomization.yaml
      cfg Commands for reading and writing configuration.
      completion Generate shell completion script
      create Create a new kustomization in the current directory
      edit Edits a kustomization file
      fn Commands for running functions against configuration.
      ...

    CSI Plugin : Local Path Provisioner

1. A CSI plugin is the module responsible for storage inside Kubernetes. Install Local Path Provisioner, a CSI plugin that is easy to use on a single-node cluster.

      kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml

  If you see a message like the following, it is installed correctly.

      namespace/local-path-storage created
      serviceaccount/local-path-provisioner-service-account created
      clusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created
      clusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created
      deployment.apps/local-path-provisioner created
      storageclass.storage.k8s.io/local-path created
      configmap/local-path-config created
2. Also, check that the provisioner pod in the local-path-storage namespace is Running, as follows.

      kubectl -n local-path-storage get pod

  If it runs successfully, the output looks like this.

      NAME                                     READY     STATUS    RESTARTS   AGE
      local-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m
3. Run the following to make it the default storage class.

      kubectl patch storageclass local-path  -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'

  If it runs successfully, the output looks like this.

      storageclass.storage.k8s.io/local-path patched
4. Check that it is now the default storage class.

      kubectl get sc

  Confirm that a storage class named local-path (default) exists, as shown below.

      NAME                   PROVISIONER             RECLAIMPOLICY   VOLUMEBINDINGMODE      ALLOWVOLUMEEXPANSION   AGE
      local-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h

(Keys related to security have been masked with <...>.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data:
      <...>
    server: https://127.0.0.1:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data:
      <...>
    client-key-data:
      <...>

2. Set Up the Kubernetes Cluster

Copy the k3s config so that it can be used as the cluster's kubeconfig.

    mkdir .kube
    sudo cp /etc/rancher/k3s/k3s.yaml .kube/config

Give the user permission to access the copied config file.

    sudo chown $USER:$USER .kube/config

3. Set Up the Kubernetes Client

Now move the kubeconfig configured on the cluster to your local machine. Locally, place it at ~/.kube/config.

In the config file you just copied, the server IP is set to https://127.0.0.1:6443.
Change this value to match the cluster's IP.
(In this page it was changed to https://192.168.0.19:6443 to match the cluster being used.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data:
      <...>
    server: https://192.168.0.19:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data:
      <...>
    client-key-data:
      <...>
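A quick way to make this substitution, assuming the cluster IP really is 192.168.0.19 as above, is a one-line sed on the client node; this is only a convenience sketch, and editing the file by hand works just as well.

# Example only: swap the loopback address for the cluster IP in the local kubeconfig.
sed -i 's#https://127.0.0.1:6443#https://192.168.0.19:6443#g' ~/.kube/config
# Quick sanity check that the client can now reach the cluster.
kubectl get nodes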

4. Install Basic Kubernetes Modules

Referring to Setup Kubernetes Modules, please install the following components.

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

5. Verify the Installation

Finally, check that the node is Ready and confirm the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

If you see a message like the following, it is installed correctly.

    NAME    STATUS   ROLES                  AGE   VERSION        INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
    ubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 <none> Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11

    6. References

If the versions of these components are changed by mistake, it can cause unexpected failures, so pin them so that they cannot change.

    sudo apt-get update
    sudo apt-get install -y apt-transport-https ca-certificates curl &&
    sudo curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg &&
    echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list &&
    sudo apt-get update
    sudo apt-get install -y kubelet=1.21.7-00 kubeadm=1.21.7-00 kubectl=1.21.7-00 &&
    sudo apt-mark hold kubelet kubeadm kubectl

Check that kubeadm, kubelet, and kubectl were installed correctly.

    mlops@ubuntu:~$ kubeadm version
    kubeadm version: &version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:40:08Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
    mlops@ubuntu:~$ kubelet --version
    Kubernetes v1.21.7
    mlops@ubuntu:~$ kubectl version --client
    Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}

Now install Kubernetes using kubeadm.

    kubeadm config images list
    kubeadm config images pull

    sudo kubeadm init --pod-network-cidr=10.244.0.0/16

Copy the admin credentials to $HOME/.kube/config so that the Kubernetes cluster can be controlled with kubectl.

    mkdir -p $HOME/.kube
    sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
    sudo chown $(id -u):$(id -g) $HOME/.kube/config

Install a CNI. There are several kinds of CNI, which handle the networking inside Kubernetes; MLOps for ALL uses flannel.

    kubectl apply -f https://raw.githubusercontent.com/flannel-io/flannel/v0.13.0/Documentation/kube-flannel.yml

Kubernetes nodes come in two main kinds: master nodes and worker nodes. For stability it is common to run only cluster-control workloads on the master node, but because this manual assumes a single-node cluster, we configure the master node so that every kind of workload can run on it.

    kubectl taint nodes --all node-role.kubernetes.io/master-

3. Set Up the Kubernetes Client

Copy the kubeconfig file created on the cluster to the client so that the cluster can be controlled with kubectl.

    mkdir -p $HOME/.kube
    scp -p {CLUSTER_USER_ID}@{CLUSTER_IP}:~/.kube/config ~/.kube/config

4. Install Basic Kubernetes Modules

Referring to Setup Kubernetes Modules, please install the following components.

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

5. Verify the Installation

Use the following command to check that the node's STATUS is Ready.

    kubectl get nodes

Once it is Ready, output similar to the following appears.

    NAME     STATUS   ROLES                  AGE     VERSION
    ubuntu Ready control-plane,master 2m55s v1.21.7

    6. References

users:
- name: minikube
  user:
    client-certificate-data: LS0tLS1CRUdJTi....
    client-key-data: LS0tLS1CRUdJTiBSU0....

1. Create the .kube folder on the client node.

  # client node
  mkdir -p /home/$USER/.kube
2. Paste the information printed in step 2 above into the file below and save it.

      vi /home/$USER/.kube/config

4. Install Basic Kubernetes Modules

Referring to Setup Kubernetes Modules, please install the following components.

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

5. Verify the Installation

Finally, check that the node is Ready and confirm the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

If you see a message like the following, it is installed correctly.

    NAME     STATUS   ROLES                  AGE     VERSION   INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
    ubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 <none> Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11
Version: 1.0

3. Install Prerequisite

This page describes the components that must be installed or configured on the cluster and the client before installing Kubernetes.

Install apt packages

Later on, port-forwarding will be needed for smooth communication between the client and the cluster. For port-forwarding, the following package must be installed on the cluster.

    sudo apt-get update
    sudo apt-get install -y socat

    Install Docker

1. Install the APT packages needed to install Docker.

      sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release
2. Add Docker's official GPG key.

      curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
3. Configure the apt package manager to install Docker from the stable repository.

      echo \
      "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
      $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
4. Check which Docker versions can currently be installed.

      sudo apt-get update && apt-cache madison docker-ce

  Check that version 5:20.10.11~3-0~ubuntu-focal appears among the versions printed.

      apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal

  If the repository was added correctly, the output looks like this.

      docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages
5. Install Docker version 5:20.10.11~3-0~ubuntu-focal.

      sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal
6. Check that Docker was installed correctly.

      sudo docker run hello-world

  If you see a message like the following after running the command, it is installed correctly.

      mlops@ubuntu:~$ sudo docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/
7. Add permissions with the following commands so that docker commands can be used without the sudo keyword.

      sudo groupadd docker
      sudo usermod -aG docker $USER
      newgrp docker
8. To confirm that docker commands now work without the sudo keyword, run docker run once more.

      docker run hello-world

  If you see a message like the following after running the command, the permissions were added correctly.

      mlops@ubuntu:~$ docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/

    Turn off Swap Memory

For kubelet to work properly, the virtual memory called swap must be turned off on the cluster node. Turn swap off with the following commands.
(If the same desktop is used as both cluster and client, turning off swap memory may slow it down.)

    sudo sed -i '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab
    sudo swapoff -a
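As a quick optional check, you can confirm that swap is really off: the Swap row of free -h should show 0B.

free -h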

    Install Kubectl

kubectl is the client tool used to send API requests to a Kubernetes cluster. It must be installed on the client node.

1. Download kubectl v1.21.7 into the current folder.

      curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl
2. Change the file's permissions and location so that kubectl can be used.

      sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
3. Check that it was installed correctly.

      kubectl version --client

  If you see a message like the following, it is installed correctly.

      Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
4. If you work with multiple Kubernetes clusters, you may end up having to manage multiple kubeconfig files.
  For ways to manage multiple kubeconfig files or multiple kube-contexts efficiently, please refer to documents on that topic (a small sketch follows below).
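As a sketch of the two most common approaches (the file name other-cluster-config is just a placeholder):

# Example only: merge several kubeconfig files via the KUBECONFIG variable, then switch contexts.
export KUBECONFIG=~/.kube/config:~/.kube/other-cluster-config
kubectl config get-contexts
kubectl config use-context default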

    References

The cluster refers to a single desktop machine that has Ubuntu installed.
For the client, we recommend using a laptop or another desktop other than the one on which the cluster is installed.
However, if you cannot prepare two machines, it is fine to use one desktop for both the cluster and the client at the same time.

Cluster

    1. Software

Below is the list of software that must be installed on the cluster.

Software            Version
Ubuntu              20.04.3 LTS
Docker (Server)     20.10.11
NVIDIA-Driver       470.86
Kubernetes          v1.21.7
Kubeflow            v1.4.0
MLflow              v1.21.0

    2. Helm Chart

Below is the list of third-party software that must be installed with Helm.

Helm Chart Repo Name             Version
datawire/ambassador              6.9.3
seldonio/seldon-core-operator    1.11.2

    클라이언트

The client has been verified on macOS (Intel CPU) and Ubuntu 20.04.

Software     Version
kubectl      v1.21.7
helm         v3.7.1
kustomize    v3.10.0

    Minimum System Requirements

We recommend that the cluster on which MLOps for ALL will be installed meets the following specifications.
These follow the recommended specifications of Kubernetes and Kubeflow.

    • CPU : 6 core
    • RAM : 12GB
    • DISK : 50GB
    • GPU : NVIDIA GPU (Optional)
Version: 1.0

2. Setup Kubernetes

Setup Kubernetes Cluster

For those learning Kubernetes for the first time, the first barrier to entry is setting up an environment to practice in.

kubeadm is the officially supported tool for building production-grade Kubernetes clusters, but there are also tools such as kubespray and kops that make the process a little easier, and tools such as k3s, minikube, microk8s, and kind that make it very easy to build a compact Kubernetes cluster for learning purposes.

Each tool has its own strengths and weaknesses, and different users prefer different tools, so this guide covers building a Kubernetes cluster with three of them: kubeadm, k3s, and minikube. For a detailed comparison of the tools, please see the official Kubernetes documentation.

The tool recommended by MLOps for ALL is k3s, whose advantage is that it makes building a Kubernetes cluster easy.
If you want to use every Kubernetes feature and manage the node composition as well, we recommend kubeadm.
minikube has the advantage that, besides the components we describe, other Kubernetes components can easily be installed as add-ons.

So that the MLOps components we are going to build work smoothly, MLOps for ALL adds a few extra configuration steps when building the Kubernetes cluster with each of these tools.

The scope of this Setup Kubernetes chapter is to turn a desktop that already has Ubuntu installed into a k8s cluster, and then confirm that the Kubernetes cluster can be reached from an external client node.

Since the detailed setup differs for each of the three tools, the chapter is organized as follows.

    3. Setup Prerequisite
    4. Setup Kubernetes
    4.1. with k3s
    4.2. with minikube
    4.3. with kubeadm
    5. Setup Kubernetes Modules

Now let's build a Kubernetes cluster with each of these tools. You do not have to try every tool; it is enough to use whichever one you are most comfortable with.

    Version: 1.0

    6. (Optional) Setup GPU

    To use GPUs in Kubernetes, Kubeflow, and the other components, the following steps are required.

    1. Install NVIDIA Driver

    If running nvidia-smi prints output like the following, you can skip this step.

    mlops@ubuntu:~$ nvidia-smi 
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    | 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |
    | 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |
    | 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |
    +-----------------------------------------------------------------------------+

    If the output of nvidia-smi does not look like the above, install an NVIDIA driver that matches your GPU.

    If you are not familiar with installing NVIDIA drivers, you can install one with the commands below.

    sudo add-apt-repository ppa:graphics-drivers/ppa
    sudo apt update && sudo apt install -y ubuntu-drivers-common
    sudo ubuntu-drivers autoinstall
    sudo reboot
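
    If you would rather see which driver the system recommends, or pin a specific driver series instead of relying on autoinstall, one possible alternative is shown below; the package name is only an example and depends on your GPU.

    ubuntu-drivers devices                  # lists detected GPUs and the recommended driver
    sudo apt install -y nvidia-driver-470   # example: install a specific driver series
    sudo reboot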

    2. Install NVIDIA-Docker

    Install NVIDIA-Docker with the following commands.

    curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \
    sudo apt-key add -
    distribution=$(. /etc/os-release;echo $ID$VERSION_ID)
    curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
    sudo apt-get update
    sudo apt-get install -y nvidia-docker2 &&
    sudo systemctl restart docker

    To verify that the installation succeeded, run a Docker container that uses the GPU.

    sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi

    If you see output like the following, the installation succeeded.

    mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    +-----------------------------------------------------------------------------+

    3. Set NVIDIA-Docker as the Default Container Runtime

    Kubernetes uses Docker-CE as its container runtime by default. To use NVIDIA GPUs inside Docker containers, the default runtime must therefore be changed so that pods are created with NVIDIA-Docker as the container runtime.

    1. Open the /etc/docker/daemon.json file and edit it as follows.

      sudo vi /etc/docker/daemon.json

      {
        "default-runtime": "nvidia",
        "runtimes": {
          "nvidia": {
            "path": "nvidia-container-runtime",
            "runtimeArgs": []
          }
        }
      }
    2. After confirming that the file has been changed, restart Docker.

      sudo systemctl daemon-reload
      sudo service docker restart
    3. Check that the change has been applied.

      sudo docker info | grep nvidia

      If you see output like the following, the runtime has been configured correctly.

      mlops@ubuntu:~$ docker info | grep nvidia
      Runtimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc
      Default Runtime: nvidia
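
      As an optional extra check that is not part of the original guide: because the default runtime is now nvidia, a CUDA container should see the GPUs even without the --gpus flag, so the command below confirms that the daemon.json change actually took effect.

      sudo docker run --rm nvidia/cuda:11.0-base nvidia-smi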

    4. Nvidia-Device-Plugin

    1. Create the nvidia-device-plugin daemonset.

      kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml
    2. Check that the nvidia-device-plugin pod has been created and is in the Running state.

      kubectl get pod -n kube-system | grep nvidia

      The output should look like the following.

      kube-system       nvidia-device-plugin-daemonset-nlqh2         1/1     Running   0      1h
    3. Check that the node now reports the GPU as an allocatable resource.

      kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\.com/gpu"

      If you see output like the following, the setup is complete.
      (The cluster used for the hands-on examples in Everyone's MLOps has two GPUs, so 2 is printed. As long as the number matches the number of GPUs in your own cluster, everything is fine.)

      NAME       GPU
      ubuntu 2

    If the setup did not succeed, the GPU value is shown as <none>.
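
    Once the device plugin reports GPUs, a pod can request one through the nvidia.com/gpu resource. The manifest below is a minimal illustration rather than part of the original guide; the pod name is a placeholder, and the image is the same CUDA image used in the Docker test above.

    apiVersion: v1
    kind: Pod
    metadata:
      name: gpu-smoke-test              # placeholder name
    spec:
      restartPolicy: Never
      containers:
        - name: cuda
          image: nvidia/cuda:11.0-base  # same image as the docker test above
          command: ["nvidia-smi"]
          resources:
            limits:
              nvidia.com/gpu: 1         # request one GPU from the device plugin

    Apply it with kubectl apply -f gpu-smoke-test.yaml and, once the pod completes, kubectl logs gpu-smoke-test should show the familiar nvidia-smi table.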

    diff --git a/en/404.html b/en/404.html

    Page Not Found

    We could not find what you were looking for.

    Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

    - + \ No newline at end of file diff --git a/en/assets/js/0096c9e8.9718f7f6.js b/en/assets/js/0096c9e8.49d0a202.js similarity index 99% rename from en/assets/js/0096c9e8.9718f7f6.js rename to en/assets/js/0096c9e8.49d0a202.js index d1332a29..f750b155 100644 --- a/en/assets/js/0096c9e8.9718f7f6.js +++ b/en/assets/js/0096c9e8.49d0a202.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2515],{3905:(t,e,a)=>{a.d(e,{Zo:()=>m,kt:()=>g});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function o(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),s=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):o(o({},e),t)),a},m=function(t){var e=s(t.components);return n.createElement(i.Provider,{value:e},t.children)},d="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},c=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),d=s(a),c=r,g=d["".concat(i,".").concat(c)]||d[c]||u[c]||l;return a?n.createElement(g,o(o({ref:e},m),{},{components:a})):n.createElement(g,o({ref:e},m))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,o=new Array(l);o[0]=c;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[d]="string"==typeof t?t:r,o[1]=p;for(var s=2;s{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const l={title:"Further Readings",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},o=void 0,p={unversionedId:"further-readings/info",id:"version-1.0/further-readings/info",title:"Further Readings",description:"MLOps Component",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/en/docs/1.0/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/further-readings/info.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",frontMatter:{title:"Further Readings",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/1.0/appendix/metallb"}},i={},s=[{value:"MLOps Component",id:"mlops-component",level:2}],m={toc:s},d="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(d,(0,n.Z)({},m,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,"From the components covered in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"MLOps Concepts"),", the following diagram illustrates them. 
"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(818).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"The technology stacks covered in ",(0,r.kt)("em",{parentName:"p"},"Everyone's MLOps")," are as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(636).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"| | Storage | ",(0,r.kt)("a",{parentName:"p",href:"https://min.io/"},"Minio")," |\n| | Data Processing | ",(0,r.kt)("a",{parentName:"p",href:"https://spark.apache.org/"},"Apache Spark")," |\n| | Data Visualization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.tableau.com/"},"Tableau")," |\n| Workflow Mgmt. | Orchestration | ",(0,r.kt)("a",{parentName:"p",href:"https://airflow.apache.org/"},"Airflow")," |\n| | Scheduling | ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/"},"Kubernetes")," |\n| Security & Compliance | Authentication & Authorization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openldap.org/"},"Ldap")," |\n| | Data Encryption & Tokenization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.vaultproject.io/"},"Vault")," |\n| | Governance & Auditing | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openpolicyagent.org/"},"Open Policy Agent")," |"),(0,r.kt)("p",null,"As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need it, it might be a good idea to refer to the following open source projects first."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(3750).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"For details:"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. 
& Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", 
",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},818:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},636:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},3750:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2515],{3905:(t,e,a)=>{a.d(e,{Zo:()=>m,kt:()=>g});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function o(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),s=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):o(o({},e),t)),a},m=function(t){var e=s(t.components);return n.createElement(i.Provider,{value:e},t.children)},d="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},c=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,m=p(t,["components","mdxType","originalType","parentName"]),d=s(a),c=r,g=d["".concat(i,".").concat(c)]||d[c]||u[c]||l;return a?n.createElement(g,o(o({ref:e},m),{},{components:a})):n.createElement(g,o({ref:e},m))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,o=new Array(l);o[0]=c;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[d]="string"==typeof t?t:r,o[1]=p;for(var s=2;s{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>s});var n=a(7462),r=(a(7294),a(3905));const l={title:"Further Readings",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},o=void 0,p={unversionedId:"further-readings/info",id:"version-1.0/further-readings/info",title:"Further Readings",description:"MLOps Component",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/en/docs/1.0/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/further-readings/info.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",frontMatter:{title:"Further Readings",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. 
Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/1.0/appendix/metallb"}},i={},s=[{value:"MLOps Component",id:"mlops-component",level:2}],m={toc:s},d="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(d,(0,n.Z)({},m,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,"From the components covered in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"MLOps Concepts"),", the following diagram illustrates them. "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(818).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"The technology stacks covered in ",(0,r.kt)("em",{parentName:"p"},"Everyone's MLOps")," are as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(636).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"| | Storage | ",(0,r.kt)("a",{parentName:"p",href:"https://min.io/"},"Minio")," |\n| | Data Processing | ",(0,r.kt)("a",{parentName:"p",href:"https://spark.apache.org/"},"Apache Spark")," |\n| | Data Visualization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.tableau.com/"},"Tableau")," |\n| Workflow Mgmt. | Orchestration | ",(0,r.kt)("a",{parentName:"p",href:"https://airflow.apache.org/"},"Airflow")," |\n| | Scheduling | ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/"},"Kubernetes")," |\n| Security & Compliance | Authentication & Authorization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openldap.org/"},"Ldap")," |\n| | Data Encryption & Tokenization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.vaultproject.io/"},"Vault")," |\n| | Governance & Auditing | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openpolicyagent.org/"},"Open Policy Agent")," |"),(0,r.kt)("p",null,"As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need it, it might be a good idea to refer to the following open source projects first."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(3750).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"For details:"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. 
& Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", 
",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},818:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},636:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},3750:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/02b9e606.788d344d.js b/en/assets/js/02b9e606.9d100819.js similarity index 99% rename from en/assets/js/02b9e606.788d344d.js rename to en/assets/js/02b9e606.9d100819.js index 4442e5dd..746bbaa2 100644 --- a/en/assets/js/02b9e606.788d344d.js +++ b/en/assets/js/02b9e606.9d100819.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2759],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>f});var o=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=o.createContext({}),u=function(e){var n=o.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},s=function(e){var n=u(e.components);return o.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},d=o.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),m=u(t),d=r,f=m["".concat(l,".").concat(d)]||m[d]||c[d]||a;return t?o.createElement(f,i(i({ref:n},s),{},{components:t})):o.createElement(f,i({ref:n},s))}));function f(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,i=new Array(a);i[0]=d;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var o=t(7462),r=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},i=void 0,p={unversionedId:"kubeflow/basic-component",id:"kubeflow/basic-component",title:"4. Component - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/en/docs/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. 
Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/en/docs/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/en/docs/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],s={toc:u},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,o.Z)({},s,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"In order to write a component, the following must be written: "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Writing Component Contents "),(0,r.kt)("li",{parentName:"ol"},"Writing Component Wrapper ")),(0,r.kt)("p",null,"Now, let's look at each process."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"Component Contents are no different from the Python code we commonly write.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, let's try writing a component that takes a number as input, prints it, and then returns it.\nWe can write it in Python code like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,r.kt)("p",null,"However, when this code is run, an error occurs and it does not work because the ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that should be printed is not defined. "),(0,r.kt)("p",null,"As we saw in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),", values like ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that are required in component content are defined in ",(0,r.kt)("strong",{parentName:"p"},"Config"),". In order to execute component content, the necessary Configs must be passed from the component wrapper."),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"Now we need to create a component wrapper to be able to pass the required Configs."),(0,r.kt)("p",null,"Without a separate Config, it will be like this when wrapped with a component wrapper."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,r.kt)("p",null,"Now we add the required Config for the content as an argument to the wrapper. However, it is not just writing the argument but also writing the type hint of the argument. When Kubeflow converts the pipeline into the Kubeflow format, it checks if the specified input and output types are matched in the connection between the components. 
If the format of the input required by the component does not match the output received from another component, the pipeline cannot be created."),(0,r.kt)("p",null,"Now we complete the component wrapper by writing down the argument, its type and the type to be returned as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("p",null,"In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"int"),(0,r.kt)("li",{parentName:"ul"},"float"),(0,r.kt)("li",{parentName:"ul"},"str")),(0,r.kt)("p",null,"If you want to return multiple values instead of a single value, you must use ",(0,r.kt)("inlineCode",{parentName:"p"},"collections.namedtuple"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the Kubeflow official documentation ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow Official Documentation"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func"),". 
This converted form can be imported as a function in Python and used in the pipeline."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,r.kt)("p",null,"If it is not possible to share with Python code, you can share components with a YAML file and use them.\nTo do this, first convert the component to a YAML file and then use it in the pipeline with ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file"),"."),(0,r.kt)("p",null,"First, let's explain the process of converting the written component to a YAML file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,r.kt)("p",null,"If you run the Python code you wrote, a file called ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," will be created. When you check the file, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,r.kt)("p",null,"Now the generated file can be shared and used in the pipeline as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,r.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,r.kt)("p",null,"In Kubeflow, the execution order of components is as 
follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull "),": Pull the image containing the execution environment information of the defined component."),(0,r.kt)("li",{parentName:"ol"},"Run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"),": Execute the component's content within the pulled image.")),(0,r.kt)("p",null,"Taking ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," as an example, the default image in ",(0,r.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7"),", so the component's content will be executed based on that image."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,r.kt)("h2",{id:"references"},"References:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2759],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>f});var o=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=o.createContext({}),u=function(e){var n=o.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},s=function(e){var n=u(e.components);return o.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},d=o.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),m=u(t),d=r,f=m["".concat(l,".").concat(d)]||m[d]||c[d]||a;return t?o.createElement(f,i(i({ref:n},s),{},{components:t})):o.createElement(f,i({ref:n},s))}));function f(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,i=new Array(a);i[0]=d;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var o=t(7462),r=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},i=void 0,p={unversionedId:"kubeflow/basic-component",id:"kubeflow/basic-component",title:"4. 
Component - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/en/docs/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/en/docs/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/en/docs/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],s={toc:u},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,o.Z)({},s,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"In order to write a component, the following must be written: "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Writing Component Contents "),(0,r.kt)("li",{parentName:"ol"},"Writing Component Wrapper ")),(0,r.kt)("p",null,"Now, let's look at each process."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"Component Contents are no different from the Python code we commonly write.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, let's try writing a component that takes a number as input, prints it, and then returns it.\nWe can write it in Python code like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,r.kt)("p",null,"However, when this code is run, an error occurs and it does not work because the ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that should be printed is not defined. "),(0,r.kt)("p",null,"As we saw in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),", values like ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that are required in component content are defined in ",(0,r.kt)("strong",{parentName:"p"},"Config"),". In order to execute component content, the necessary Configs must be passed from the component wrapper."),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"Now we need to create a component wrapper to be able to pass the required Configs."),(0,r.kt)("p",null,"Without a separate Config, it will be like this when wrapped with a component wrapper."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,r.kt)("p",null,"Now we add the required Config for the content as an argument to the wrapper. 
However, it is not just writing the argument but also writing the type hint of the argument. When Kubeflow converts the pipeline into the Kubeflow format, it checks if the specified input and output types are matched in the connection between the components. If the format of the input required by the component does not match the output received from another component, the pipeline cannot be created."),(0,r.kt)("p",null,"Now we complete the component wrapper by writing down the argument, its type and the type to be returned as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("p",null,"In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"int"),(0,r.kt)("li",{parentName:"ul"},"float"),(0,r.kt)("li",{parentName:"ul"},"str")),(0,r.kt)("p",null,"If you want to return multiple values instead of a single value, you must use ",(0,r.kt)("inlineCode",{parentName:"p"},"collections.namedtuple"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the Kubeflow official documentation ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow Official Documentation"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func"),". 
This converted form can be imported as a function in Python and used in the pipeline."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,r.kt)("p",null,"If it is not possible to share with Python code, you can share components with a YAML file and use them.\nTo do this, first convert the component to a YAML file and then use it in the pipeline with ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file"),"."),(0,r.kt)("p",null,"First, let's explain the process of converting the written component to a YAML file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,r.kt)("p",null,"If you run the Python code you wrote, a file called ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," will be created. When you check the file, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,r.kt)("p",null,"Now the generated file can be shared and used in the pipeline as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,r.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,r.kt)("p",null,"In Kubeflow, the execution order of components is as 
follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull "),": Pull the image containing the execution environment information of the defined component."),(0,r.kt)("li",{parentName:"ol"},"Run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"),": Execute the component's content within the pulled image.")),(0,r.kt)("p",null,"Taking ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," as an example, the default image in ",(0,r.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7"),", so the component's content will be executed based on that image."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,r.kt)("h2",{id:"references"},"References:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/0e729158.e6781735.js b/en/assets/js/0e729158.5de5ac42.js similarity index 98% rename from en/assets/js/0e729158.e6781735.js rename to en/assets/js/0e729158.5de5ac42.js index f2dbc75c..f05acb5d 100644 --- a/en/assets/js/0e729158.e6781735.js +++ b/en/assets/js/0e729158.5de5ac42.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1060],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var o=r.createContext({}),i=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=i(e.components);return r.createElement(o.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,o=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=i(n),k=a,b=p["".concat(o,".").concat(k)]||p[k]||d[k]||s;return n?r.createElement(b,l(l({ref:t},c),{},{components:n})):r.createElement(b,l({ref:t},c))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=k;var u={};for(var o in t)hasOwnProperty.call(t,o)&&(u[o]=t[o]);u.originalType=e,u[p]="string"==typeof e?e:a,l[1]=u;for(var i=2;i{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>l,default:()=>d,frontMatter:()=>s,metadata:()=>u,toc:()=>i});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.3. 
Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},o={},i=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:i},p="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,a.kt)("p",null,"Before building a Kubernetes cluster, install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," and install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Change the configuration of the network for Kubernetes."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{n.d(t,{Zo:()=>c,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function s(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var o=r.createContext({}),i=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=i(e.components);return r.createElement(o.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,s=e.originalType,o=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=i(n),k=a,b=p["".concat(o,".").concat(k)]||p[k]||d[k]||s;return n?r.createElement(b,l(l({ref:t},c),{},{components:n})):r.createElement(b,l({ref:t},c))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var s=n.length,l=new Array(s);l[0]=k;var u={};for(var o in t)hasOwnProperty.call(t,o)&&(u[o]=t[o]);u.originalType=e,u[p]="string"==typeof e?e:a,l[1]=u;for(var i=2;i{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>l,default:()=>d,frontMatter:()=>s,metadata:()=>u,toc:()=>i});var r=n(7462),a=(n(7294),n(3905));const s={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},l=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. 
K3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},o={},i=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:i},p="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before building a Kubernetes cluster, install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," and install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Change the configuration of the network for Kubernetes."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{t.d(n,{Zo:()=>p,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=d(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||s;return t?a.createElement(b,r(r({ref:n},p),{},{components:t})):a.createElement(b,r({ref:n},p))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,r[1]=l;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>d});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"setup-components/install-components-seldon",title:"3. 
Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/en/docs/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-seldon.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/en/docs/setup-components/install-components-mlflow"},next:{title:"4. Prometheus & Grafana",permalink:"/en/docs/setup-components/install-components-pg"}},i={},d=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Installing Seldon-Core",id:"installing-seldon-core",level:2},{value:"Adding Ambassador to the Helm Repository",id:"adding-ambassador-to-the-helm-repository",level:3},{value:"Update Ambassador - Helm Repository",id:"update-ambassador---helm-repository",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],p={toc:d},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core is one of the open source frameworks that can deploy and manage numerous machine learning models in Kubernetes environments.",(0,o.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the official ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"product description page")," and ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"GitHub")," of Seldon-Core and API Deployment part."),(0,o.kt)("h2",{id:"installing-seldon-core"},"Installing Seldon-Core"),(0,o.kt)("p",null,"In order to use Seldon-Core, modules such as Ambassador, which is responsible for Ingress of Kubernetes, and Istio are required ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"here"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core officially supports only Ambassador and Istio, and ",(0,o.kt)("em",{parentName:"p"},"MLOps for everyone")," will use Ambassador to use Seldon-core, so we will install Ambassador."),(0,o.kt)("h3",{id:"adding-ambassador-to-the-helm-repository"},"Adding Ambassador to the Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"If the following message is displayed, it means it has been added normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-ambassador---helm-repository"},"Update Ambassador - Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is output, 
it means that the update has been completed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"Install version 6.9.3 of the Ambassador Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"Wait until four pods become running in the seldon-system."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"Install version 1.11.2 of the seldon-core-operator Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 
ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"Wait until one seldon-controller-manager pod is Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7558],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=d(t),u=o,b=c["".concat(i,".").concat(u)]||c[u]||m[u]||s;return t?a.createElement(b,r(r({ref:n},p),{},{components:t})):a.createElement(b,r({ref:n},p))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[c]="string"==typeof e?e:o,r[1]=l;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>m,frontMatter:()=>s,metadata:()=>l,toc:()=>d});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"setup-components/install-components-seldon",title:"3. 
Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/en/docs/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-seldon.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/en/docs/setup-components/install-components-mlflow"},next:{title:"4. Prometheus & Grafana",permalink:"/en/docs/setup-components/install-components-pg"}},i={},d=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Installing Seldon-Core",id:"installing-seldon-core",level:2},{value:"Adding Ambassador to the Helm Repository",id:"adding-ambassador-to-the-helm-repository",level:3},{value:"Update Ambassador - Helm Repository",id:"update-ambassador---helm-repository",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],p={toc:d},c="wrapper";function m(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,a.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core is one of the open source frameworks that can deploy and manage numerous machine learning models in Kubernetes environments.",(0,o.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the official ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"product description page")," and ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"GitHub")," of Seldon-Core and API Deployment part."),(0,o.kt)("h2",{id:"installing-seldon-core"},"Installing Seldon-Core"),(0,o.kt)("p",null,"In order to use Seldon-Core, modules such as Ambassador, which is responsible for Ingress of Kubernetes, and Istio are required ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"here"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core officially supports only Ambassador and Istio, and ",(0,o.kt)("em",{parentName:"p"},"MLOps for everyone")," will use Ambassador to use Seldon-core, so we will install Ambassador."),(0,o.kt)("h3",{id:"adding-ambassador-to-the-helm-repository"},"Adding Ambassador to the Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"If the following message is displayed, it means it has been added normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-ambassador---helm-repository"},"Update Ambassador - Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is output, 
it means that the update has been completed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"Install version 6.9.3 of the Ambassador Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"Wait until four pods become running in the seldon-system."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"Install version 1.11.2 of the seldon-core-operator Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 
ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"Wait until one seldon-controller-manager pod is Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/10a35dc9.602a320d.js b/en/assets/js/10a35dc9.f922fc3e.js similarity index 97% rename from en/assets/js/10a35dc9.602a320d.js rename to en/assets/js/10a35dc9.f922fc3e.js index d5956e90..65791e01 100644 --- a/en/assets/js/10a35dc9.602a320d.js +++ b/en/assets/js/10a35dc9.f922fc3e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9800],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),l=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=l(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},b=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,o=e.originalType,i=e.parentName,p=u(e,["components","mdxType","originalType","parentName"]),c=l(n),b=s,m=c["".concat(i,".").concat(b)]||c[b]||d[b]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var o=n.length,a=new Array(o);a[0]=b;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[c]="string"==typeof e?e:s,a[1]=u;for(var l=2;l{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>u,toc:()=>l});var r=n(7462),s=(n(7294),n(3905));const o={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},a=void 0,u={unversionedId:"setup-kubernetes/kubernetes",id:"setup-kubernetes/kubernetes",title:"2. 
Setup Kubernetes",description:"Setup Kubernetes",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/en/docs/setup-kubernetes/intro"},next:{title:"3. Install Prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite"}},i={},l=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],p={toc:l},c="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment."),(0,s.kt)("p",null,"The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes."),(0,s.kt)("p",null,"Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster.\nFor detailed comparisons of each tool, please refer to the official Kubernetes ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"documentation"),"."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"MLOps for ALL")," recommends ",(0,s.kt)("strong",{parentName:"p"},"k3s")," as a tool that is easy to use when setting up a Kubernetes cluster."),(0,s.kt)("p",null,"If you want to use all the features of Kubernetes and configure the nodes, we recommend ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),".",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," has the advantage of being able to easily install other Kubernetes in an add-on format, in addition to the components we describe."),(0,s.kt)("p",null,"In this ",(0,s.kt)("em",{parentName:"p"},"MLOps for ALL"),", in order to use the components that will be built for MLOps smoothly, there are additional settings that must be configured when building the Kubernetes cluster using each of the tools."),(0,s.kt)("p",null,"The scope of this ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes")," section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster."),(0,s.kt)("p",null,"The detailed setup procedure is composed of the following flow, as each of the three tools has its own setup procedure."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. 
Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"Let's now build a Kubernetes cluster by using each of the tools. You don't have to use all the tools, and you can use the tools that you are familiar with."))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9800],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),l=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=l(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},b=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,o=e.originalType,i=e.parentName,p=u(e,["components","mdxType","originalType","parentName"]),c=l(n),b=s,m=c["".concat(i,".").concat(b)]||c[b]||d[b]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var o=n.length,a=new Array(o);a[0]=b;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[c]="string"==typeof e?e:s,a[1]=u;for(var l=2;l{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>u,toc:()=>l});var r=n(7462),s=(n(7294),n(3905));const o={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},a=void 0,u={unversionedId:"setup-kubernetes/kubernetes",id:"setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/en/docs/setup-kubernetes/intro"},next:{title:"3. 
Install Prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite"}},i={},l=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],p={toc:l},c="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment."),(0,s.kt)("p",null,"The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes."),(0,s.kt)("p",null,"Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster.\nFor detailed comparisons of each tool, please refer to the official Kubernetes ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"documentation"),"."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"MLOps for ALL")," recommends ",(0,s.kt)("strong",{parentName:"p"},"k3s")," as a tool that is easy to use when setting up a Kubernetes cluster."),(0,s.kt)("p",null,"If you want to use all the features of Kubernetes and configure the nodes, we recommend ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),".",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," has the advantage of being able to easily install other Kubernetes in an add-on format, in addition to the components we describe."),(0,s.kt)("p",null,"In this ",(0,s.kt)("em",{parentName:"p"},"MLOps for ALL"),", in order to use the components that will be built for MLOps smoothly, there are additional settings that must be configured when building the Kubernetes cluster using each of the tools."),(0,s.kt)("p",null,"The scope of this ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes")," section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster."),(0,s.kt)("p",null,"The detailed setup procedure is composed of the following flow, as each of the three tools has its own setup procedure."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"Let's now build a Kubernetes cluster by using each of the tools. 
You don't have to use all the tools, and you can use the tools that you are familiar with."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/11b44e77.d3ff2fcd.js b/en/assets/js/11b44e77.6235d7bd.js similarity index 99% rename from en/assets/js/11b44e77.d3ff2fcd.js rename to en/assets/js/11b44e77.6235d7bd.js index c744843d..b48f5eaa 100644 --- a/en/assets/js/11b44e77.d3ff2fcd.js +++ b/en/assets/js/11b44e77.6235d7bd.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7775],{3905:(e,n,t)=>{t.d(n,{Zo:()=>o,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),m=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},o=function(e){var n=m(e.components);return r.createElement(s.Provider,{value:n},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,o=i(e,["components","mdxType","originalType","parentName"]),_=m(t),d=a,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||p;return t?r.createElement(b,u(u({ref:n},o),{},{components:t})):r.createElement(b,u({ref:n},o))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[_]="string"==typeof e?e:a,u[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>p,metadata:()=>i,toc:()=>m});var r=t(7462),a=(t(7294),t(3905));const p={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/en/docs/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/en/docs/kubeflow/advanced-environment"},next:{title:"11. 
Pipeline - Run Result",permalink:"/en/docs/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],o={toc:m},_="wrapper";function l(e){let{components:n,...p}=e;return(0,a.kt)(_,(0,r.Z)({},o,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,a.kt)("p",null,"In this page, we will look at values that can be set in the pipeline."),(0,a.kt)("h2",{id:"display-name"},"Display Name"),(0,a.kt)("p",null,"Created within the pipeline, components have two names:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"task_name: the function name when writing the component"),(0,a.kt)("li",{parentName:"ul"},"display_name: the name that appears in the kubeflow UI")),(0,a.kt)("p",null,"For example, in the case where both components are set to Print and return number, it is difficult to tell which component is 1 or 2."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"run-7",src:t(1847).Z,width:"3408",height:"2156"})),(0,a.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,a.kt)("p",null,"The solution for this is the display_name.",(0,a.kt)("br",{parentName:"p"}),"\n","We can set the display_name in the pipeline by using the set_display_name ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute")," of the component."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you run this script and check the resulting ",(0,a.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),", it would be like this."),(0,a.kt)("p",null,(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n 
inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport 
argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": 
"{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,a.kt)("p",null,"If compared with the previous file, the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"pipelines.kubeflow.org/task_display_name"))," key has been newly created."),(0,a.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,a.kt)("p",null,"We will upload the version of the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"pipeline")," using the files we created earlier."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"adv-pipeline-0.png",src:t(959).Z,width:"3360",height:"2100"})),(0,a.kt)("p",null,"As you can see, the configured name is displayed as shown above."),(0,a.kt)("h2",{id:"resources"},"Resources"),(0,a.kt)("h3",{id:"gpu"},"GPU"),(0,a.kt)("p",null,"By default, when the pipeline runs components as Kubernetes pods, it uses the default resource specifications.",(0,a.kt)("br",{parentName:"p"}),"\n","If you need to train a model using a GPU and the Kubernetes environment doesn't allocate a GPU, the training may not be performed correctly.",(0,a.kt)("br",{parentName:"p"}),"\n","To address this, you can use the ",(0,a.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute")," to set the GPU limit."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you execute the above script, you can see that the resources has been added with ",(0,a.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," in the generated file when you look closely at ",(0,a.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),".\nThrough this, you can allocate a GPU."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n 
import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,a.kt)("h3",{id:"cpu"},"CPU"),(0,a.kt)("p",null,"The function to set the number of CPUs can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," attribute ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),".",(0,a.kt)("br",{parentName:"p"}),"\n","The difference from GPUs is that the input must be a string, not an int."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The changed part only can be confirmed as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,a.kt)("h3",{id:"memory"},"Memory"),(0,a.kt)("p",null,"Memory can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,a.kt)("p",null,"The 
changed parts are as follows if checked."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},959:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},1847:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7775],{3905:(e,n,t)=>{t.d(n,{Zo:()=>o,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),m=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},o=function(e){var n=m(e.components);return r.createElement(s.Provider,{value:n},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,o=i(e,["components","mdxType","originalType","parentName"]),_=m(t),d=a,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||p;return t?r.createElement(b,u(u({ref:n},o),{},{components:t})):r.createElement(b,u({ref:n},o))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[_]="string"==typeof e?e:a,u[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>p,metadata:()=>i,toc:()=>m});var r=t(7462),a=(t(7294),t(3905));const p={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/en/docs/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/en/docs/kubeflow/advanced-environment"},next:{title:"11. 
Pipeline - Run Result",permalink:"/en/docs/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],o={toc:m},_="wrapper";function l(e){let{components:n,...p}=e;return(0,a.kt)(_,(0,r.Z)({},o,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,a.kt)("p",null,"In this page, we will look at values that can be set in the pipeline."),(0,a.kt)("h2",{id:"display-name"},"Display Name"),(0,a.kt)("p",null,"Created within the pipeline, components have two names:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"task_name: the function name when writing the component"),(0,a.kt)("li",{parentName:"ul"},"display_name: the name that appears in the kubeflow UI")),(0,a.kt)("p",null,"For example, in the case where both components are set to Print and return number, it is difficult to tell which component is 1 or 2."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"run-7",src:t(1847).Z,width:"3408",height:"2156"})),(0,a.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,a.kt)("p",null,"The solution for this is the display_name.",(0,a.kt)("br",{parentName:"p"}),"\n","We can set the display_name in the pipeline by using the set_display_name ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute")," of the component."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you run this script and check the resulting ",(0,a.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),", it would be like this."),(0,a.kt)("p",null,(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n 
inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport 
argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": 
"{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,a.kt)("p",null,"If compared with the previous file, the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"pipelines.kubeflow.org/task_display_name"))," key has been newly created."),(0,a.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,a.kt)("p",null,"We will upload the version of the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"pipeline")," using the files we created earlier."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"adv-pipeline-0.png",src:t(959).Z,width:"3360",height:"2100"})),(0,a.kt)("p",null,"As you can see, the configured name is displayed as shown above."),(0,a.kt)("h2",{id:"resources"},"Resources"),(0,a.kt)("h3",{id:"gpu"},"GPU"),(0,a.kt)("p",null,"By default, when the pipeline runs components as Kubernetes pods, it uses the default resource specifications.",(0,a.kt)("br",{parentName:"p"}),"\n","If you need to train a model using a GPU and the Kubernetes environment doesn't allocate a GPU, the training may not be performed correctly.",(0,a.kt)("br",{parentName:"p"}),"\n","To address this, you can use the ",(0,a.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute")," to set the GPU limit."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you execute the above script, you can see that the resources has been added with ",(0,a.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," in the generated file when you look closely at ",(0,a.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),".\nThrough this, you can allocate a GPU."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n 
import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,a.kt)("h3",{id:"cpu"},"CPU"),(0,a.kt)("p",null,"The function to set the number of CPUs can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," attribute ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),".",(0,a.kt)("br",{parentName:"p"}),"\n","The difference from GPUs is that the input must be a string, not an int."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The changed part only can be confirmed as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,a.kt)("h3",{id:"memory"},"Memory"),(0,a.kt)("p",null,"Memory can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,a.kt)("p",null,"The 
changed parts are as follows if checked."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},959:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},1847:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/160bf777.f9f374b7.js b/en/assets/js/160bf777.e9823c6a.js similarity index 99% rename from en/assets/js/160bf777.f9f374b7.js rename to en/assets/js/160bf777.e9823c6a.js index b32cff31..2d0f64a5 100644 --- a/en/assets/js/160bf777.f9f374b7.js +++ b/en/assets/js/160bf777.e9823c6a.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7005],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),o=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},m=function(e){var n=o(e.components);return r.createElement(s.Provider,{value:n},e.children)},l="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),l=o(t),d=a,b=l["".concat(s,".").concat(d)]||l[d]||_[d]||p;return t?r.createElement(b,u(u({ref:n},m),{},{components:t})):r.createElement(b,u({ref:n},m))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[l]="string"==typeof e?e:a,u[1]=i;for(var o=2;o{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>_,frontMatter:()=>p,metadata:()=>i,toc:()=>o});var r=t(7462),a=(t(7294),t(3905));const p={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"version-1.0/kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/en/docs/1.0/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/en/docs/1.0/kubeflow/basic-component"},next:{title:"6. 
Pipeline - Upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload"}},s={},o=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],m={toc:o},l="wrapper";function _(e){let{components:n,...p}=e;return(0,a.kt)(l,(0,r.Z)({},m,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"Components do not run independently but rather as components of a pipeline. Therefore, in order to run a component, a pipeline must be written.\nAnd in order to write a pipeline, a set of components and the order of execution of those components is necessary."),(0,a.kt)("p",null,"On this page, we will create a pipeline with a component that takes a number as input and outputs it, and a component that takes two numbers from two components and outputs the sum."),(0,a.kt)("h2",{id:"component-set"},"Component Set"),(0,a.kt)("p",null,"First, let's create the components that will be used in the pipeline."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,a.kt)("p",{parentName:"li"},"This component prints and returns the input number.",(0,a.kt)("br",{parentName:"p"}),"\n","Since the component returns the input value, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,a.kt)("p",{parentName:"li"},"This component calculates the sum of two input numbers and prints it.",(0,a.kt)("br",{parentName:"p"}),"\n","Similarly, since the component returns the sum, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,a.kt)("h2",{id:"component-order"},"Component Order"),(0,a.kt)("h3",{id:"define-order"},"Define Order"),(0,a.kt)("p",null,"If you have created the necessary set of components, the next step is to define their sequence.",(0,a.kt)("br",{parentName:"p"}),"\n","The diagram below represents the order of the pipeline components to be created on this page."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"pipeline-0.png",src:t(7114).Z,width:"586",height:"262"})),(0,a.kt)("h3",{id:"single-output"},"Single Output"),(0,a.kt)("p",null,"Now let's translate this sequence into code."),(0,a.kt)("p",null,"First, writing ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," from the picture above would look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def 
example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,a.kt)("p",null,"Run the component and store the return values in ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"number_2_result"),", respectively.",(0,a.kt)("br",{parentName:"p"}),"\n","The return value of the stored ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," can be used through ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output"),"."),(0,a.kt)("h3",{id:"multi-output"},"Multi Output"),(0,a.kt)("p",null,"In the example above, the components return a single value, so it can be directly used with ",(0,a.kt)("inlineCode",{parentName:"p"},"output"),".",(0,a.kt)("br",{parentName:"p"}),"\n","However, if there are multiple return values, they will be stored in ",(0,a.kt)("inlineCode",{parentName:"p"},"outputs")," as a dictionary. You can use the keys to access the desired return values.\nLet's consider an example with a component that returns multiple values, like the one mentioned in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-component#define-a-standalone-python-function"},"component")," definition. The ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," component returns ",(0,a.kt)("inlineCode",{parentName:"p"},"quotient")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"remainder"),". Here's an example of passing these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,a.kt)("p",null,"Store the result of ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"divided_result")," and you can get the values of each by ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]')," and ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"."),(0,a.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,a.kt)("p",null,"Now, let's get back to the main topic and pass the result of these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("p",null,"Next, gather the necessary Configs for each component and define it as a pipeline Config."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,a.kt)("p",null,"Finally, convert it into a format that can be used in Kubeflow. 
The conversion can be done using the ",(0,a.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," function."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,a.kt)("p",null,"In order to run a pipeline in Kubeflow, it needs to be compiled into the designated yaml format as only yaml format is possible, so the created pipeline needs to be compiled into a specific yaml format.\nCompilation can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("h2",{id:"conclusion"},"Conclusion"),(0,a.kt)("p",null,"As explained earlier, if we gather the content into a Python code, it will look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The compiled result is as follows."),(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: 
\'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', 
pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return 
number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}_.isMDXComponent=!0},7114:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7005],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var 
s=r.createContext({}),o=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},m=function(e){var n=o(e.components);return r.createElement(s.Provider,{value:n},e.children)},l="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),l=o(t),d=a,b=l["".concat(s,".").concat(d)]||l[d]||_[d]||p;return t?r.createElement(b,u(u({ref:n},m),{},{components:t})):r.createElement(b,u({ref:n},m))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[l]="string"==typeof e?e:a,u[1]=i;for(var o=2;o{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>_,frontMatter:()=>p,metadata:()=>i,toc:()=>o});var r=t(7462),a=(t(7294),t(3905));const p={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"version-1.0/kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/en/docs/1.0/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/en/docs/1.0/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload"}},s={},o=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],m={toc:o},l="wrapper";function _(e){let{components:n,...p}=e;return(0,a.kt)(l,(0,r.Z)({},m,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"Components do not run independently but rather as components of a pipeline. 
Therefore, in order to run a component, a pipeline must be written.\nAnd in order to write a pipeline, a set of components and the order of execution of those components is necessary."),(0,a.kt)("p",null,"On this page, we will create a pipeline with a component that takes a number as input and outputs it, and a component that takes two numbers from two components and outputs the sum."),(0,a.kt)("h2",{id:"component-set"},"Component Set"),(0,a.kt)("p",null,"First, let's create the components that will be used in the pipeline."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,a.kt)("p",{parentName:"li"},"This component prints and returns the input number.",(0,a.kt)("br",{parentName:"p"}),"\n","Since the component returns the input value, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,a.kt)("p",{parentName:"li"},"This component calculates the sum of two input numbers and prints it.",(0,a.kt)("br",{parentName:"p"}),"\n","Similarly, since the component returns the sum, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,a.kt)("h2",{id:"component-order"},"Component Order"),(0,a.kt)("h3",{id:"define-order"},"Define Order"),(0,a.kt)("p",null,"If you have created the necessary set of components, the next step is to define their sequence.",(0,a.kt)("br",{parentName:"p"}),"\n","The diagram below represents the order of the pipeline components to be created on this page."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"pipeline-0.png",src:t(7114).Z,width:"586",height:"262"})),(0,a.kt)("h3",{id:"single-output"},"Single Output"),(0,a.kt)("p",null,"Now let's translate this sequence into code."),(0,a.kt)("p",null,"First, writing ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," from the picture above would look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,a.kt)("p",null,"Run the component and store the return values in ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"number_2_result"),", respectively.",(0,a.kt)("br",{parentName:"p"}),"\n","The return value of the stored ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," can be used through ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output"),"."),(0,a.kt)("h3",{id:"multi-output"},"Multi Output"),(0,a.kt)("p",null,"In the example above, the components return a single value, so it can be directly used with ",(0,a.kt)("inlineCode",{parentName:"p"},"output"),".",(0,a.kt)("br",{parentName:"p"}),"\n","However, if there are 
multiple return values, they will be stored in ",(0,a.kt)("inlineCode",{parentName:"p"},"outputs")," as a dictionary. You can use the keys to access the desired return values.\nLet's consider an example with a component that returns multiple values, like the one mentioned in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-component#define-a-standalone-python-function"},"component")," definition. The ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," component returns ",(0,a.kt)("inlineCode",{parentName:"p"},"quotient")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"remainder"),". Here's an example of passing these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,a.kt)("p",null,"Store the result of ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"divided_result")," and you can get the values of each by ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]')," and ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"."),(0,a.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,a.kt)("p",null,"Now, let's get back to the main topic and pass the result of these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("p",null,"Next, gather the necessary Configs for each component and define it as a pipeline Config."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,a.kt)("p",null,"Finally, convert it into a format that can be used in Kubeflow. 
The conversion can be done using the ",(0,a.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," function."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,a.kt)("p",null,"In order to run a pipeline in Kubeflow, it needs to be compiled into the designated yaml format as only yaml format is possible, so the created pipeline needs to be compiled into a specific yaml format.\nCompilation can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("h2",{id:"conclusion"},"Conclusion"),(0,a.kt)("p",null,"As explained earlier, if we gather the content into a Python code, it will look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The compiled result is as follows."),(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: 
\'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', 
pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return 
number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}_.isMDXComponent=!0},7114:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/1a5d547c.d3e9f87a.js b/en/assets/js/1a5d547c.bd8579e4.js similarity index 99% rename from en/assets/js/1a5d547c.d3e9f87a.js rename to en/assets/js/1a5d547c.bd8579e4.js index 45e510c8..4ab952dc 100644 --- a/en/assets/js/1a5d547c.d3e9f87a.js +++ b/en/assets/js/1a5d547c.bd8579e4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8424],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return 
Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),o=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},m=function(e){var n=o(e.components);return r.createElement(s.Provider,{value:n},e.children)},l="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),l=o(t),d=a,b=l["".concat(s,".").concat(d)]||l[d]||_[d]||p;return t?r.createElement(b,u(u({ref:n},m),{},{components:t})):r.createElement(b,u({ref:n},m))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[l]="string"==typeof e?e:a,u[1]=i;for(var o=2;o{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>_,frontMatter:()=>p,metadata:()=>i,toc:()=>o});var r=t(7462),a=(t(7294),t(3905));const p={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/en/docs/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/en/docs/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload"}},s={},o=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],m={toc:o},l="wrapper";function _(e){let{components:n,...p}=e;return(0,a.kt)(l,(0,r.Z)({},m,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"Components do not run independently but rather as components of a pipeline. 
Therefore, in order to run a component, a pipeline must be written.\nAnd in order to write a pipeline, a set of components and the order of execution of those components is necessary."),(0,a.kt)("p",null,"On this page, we will create a pipeline with a component that takes a number as input and outputs it, and a component that takes two numbers from two components and outputs the sum."),(0,a.kt)("h2",{id:"component-set"},"Component Set"),(0,a.kt)("p",null,"First, let's create the components that will be used in the pipeline."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,a.kt)("p",{parentName:"li"},"This component prints and returns the input number.",(0,a.kt)("br",{parentName:"p"}),"\n","Since the component returns the input value, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,a.kt)("p",{parentName:"li"},"This component calculates the sum of two input numbers and prints it.",(0,a.kt)("br",{parentName:"p"}),"\n","Similarly, since the component returns the sum, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,a.kt)("h2",{id:"component-order"},"Component Order"),(0,a.kt)("h3",{id:"define-order"},"Define Order"),(0,a.kt)("p",null,"If you have created the necessary set of components, the next step is to define their sequence.",(0,a.kt)("br",{parentName:"p"}),"\n","The diagram below represents the order of the pipeline components to be created on this page."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"pipeline-0.png",src:t(9737).Z,width:"586",height:"262"})),(0,a.kt)("h3",{id:"single-output"},"Single Output"),(0,a.kt)("p",null,"Now let's translate this sequence into code."),(0,a.kt)("p",null,"First, writing ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," from the picture above would look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,a.kt)("p",null,"Run the component and store the return values in ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"number_2_result"),", respectively.",(0,a.kt)("br",{parentName:"p"}),"\n","The return value of the stored ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," can be used through ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output"),"."),(0,a.kt)("h3",{id:"multi-output"},"Multi Output"),(0,a.kt)("p",null,"In the example above, the components return a single value, so it can be directly used with ",(0,a.kt)("inlineCode",{parentName:"p"},"output"),".",(0,a.kt)("br",{parentName:"p"}),"\n","However, if there are 
multiple return values, they will be stored in ",(0,a.kt)("inlineCode",{parentName:"p"},"outputs")," as a dictionary. You can use the keys to access the desired return values.\nLet's consider an example with a component that returns multiple values, like the one mentioned in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-component#define-a-standalone-python-function"},"component")," definition. The ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," component returns ",(0,a.kt)("inlineCode",{parentName:"p"},"quotient")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"remainder"),". Here's an example of passing these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,a.kt)("p",null,"Store the result of ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"divided_result")," and you can get the values of each by ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]')," and ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"."),(0,a.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,a.kt)("p",null,"Now, let's get back to the main topic and pass the result of these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("p",null,"Next, gather the necessary Configs for each component and define it as a pipeline Config."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,a.kt)("p",null,"Finally, convert it into a format that can be used in Kubeflow. 
The conversion can be done using the ",(0,a.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," function."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,a.kt)("p",null,"In order to run a pipeline in Kubeflow, it needs to be compiled into the designated yaml format as only yaml format is possible, so the created pipeline needs to be compiled into a specific yaml format.\nCompilation can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("h2",{id:"conclusion"},"Conclusion"),(0,a.kt)("p",null,"As explained earlier, if we gather the content into a Python code, it will look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The compiled result is as follows."),(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: 
\'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', 
pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return 
number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}_.isMDXComponent=!0},9737:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8424],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var 
s=r.createContext({}),o=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},m=function(e){var n=o(e.components);return r.createElement(s.Provider,{value:n},e.children)},l="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),l=o(t),d=a,b=l["".concat(s,".").concat(d)]||l[d]||_[d]||p;return t?r.createElement(b,u(u({ref:n},m),{},{components:t})):r.createElement(b,u({ref:n},m))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[l]="string"==typeof e?e:a,u[1]=i;for(var o=2;o{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>_,frontMatter:()=>p,metadata:()=>i,toc:()=>o});var r=t(7462),a=(t(7294),t(3905));const p={title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/basic-pipeline",id:"kubeflow/basic-pipeline",title:"5. Pipeline - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline",permalink:"/en/docs/kubeflow/basic-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Pipeline - Write",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Component - Write",permalink:"/en/docs/kubeflow/basic-component"},next:{title:"6. Pipeline - Upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload"}},s={},o=[{value:"Pipeline",id:"pipeline",level:2},{value:"Component Set",id:"component-set",level:2},{value:"Component Order",id:"component-order",level:2},{value:"Define Order",id:"define-order",level:3},{value:"Single Output",id:"single-output",level:3},{value:"Multi Output",id:"multi-output",level:3},{value:"Write to python code",id:"write-to-python-code",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:2},{value:"Conclusion",id:"conclusion",level:2}],m={toc:o},l="wrapper";function _(e){let{components:n,...p}=e;return(0,a.kt)(l,(0,r.Z)({},m,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"Components do not run independently but rather as components of a pipeline. 
Therefore, in order to run a component, a pipeline must be written.\nAnd in order to write a pipeline, a set of components and the order of execution of those components is necessary."),(0,a.kt)("p",null,"On this page, we will create a pipeline with a component that takes a number as input and outputs it, and a component that takes two numbers from two components and outputs the sum."),(0,a.kt)("h2",{id:"component-set"},"Component Set"),(0,a.kt)("p",null,"First, let's create the components that will be used in the pipeline."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number")),(0,a.kt)("p",{parentName:"li"},"This component prints and returns the input number.",(0,a.kt)("br",{parentName:"p"}),"\n","Since the component returns the input value, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers")),(0,a.kt)("p",{parentName:"li"},"This component calculates the sum of two input numbers and prints it.",(0,a.kt)("br",{parentName:"p"}),"\n","Similarly, since the component returns the sum, we specify ",(0,a.kt)("inlineCode",{parentName:"p"},"int")," as the return type hint."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-python"},"@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_num = number_1 + number_2\n print(sum_num)\n return sum_num\n")))),(0,a.kt)("h2",{id:"component-order"},"Component Order"),(0,a.kt)("h3",{id:"define-order"},"Define Order"),(0,a.kt)("p",null,"If you have created the necessary set of components, the next step is to define their sequence.",(0,a.kt)("br",{parentName:"p"}),"\n","The diagram below represents the order of the pipeline components to be created on this page."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"pipeline-0.png",src:t(9737).Z,width:"586",height:"262"})),(0,a.kt)("h3",{id:"single-output"},"Single Output"),(0,a.kt)("p",null,"Now let's translate this sequence into code."),(0,a.kt)("p",null,"First, writing ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_1")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number_2")," from the picture above would look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n")),(0,a.kt)("p",null,"Run the component and store the return values in ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"number_2_result"),", respectively.",(0,a.kt)("br",{parentName:"p"}),"\n","The return value of the stored ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_result")," can be used through ",(0,a.kt)("inlineCode",{parentName:"p"},"number_1_resulst.output"),"."),(0,a.kt)("h3",{id:"multi-output"},"Multi Output"),(0,a.kt)("p",null,"In the example above, the components return a single value, so it can be directly used with ",(0,a.kt)("inlineCode",{parentName:"p"},"output"),".",(0,a.kt)("br",{parentName:"p"}),"\n","However, if there are 
multiple return values, they will be stored in ",(0,a.kt)("inlineCode",{parentName:"p"},"outputs")," as a dictionary. You can use the keys to access the desired return values.\nLet's consider an example with a component that returns multiple values, like the one mentioned in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-component#define-a-standalone-python-function"},"component")," definition. The ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," component returns ",(0,a.kt)("inlineCode",{parentName:"p"},"quotient")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"remainder"),". Here's an example of passing these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"print_and_return_number"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'def multi_pipeline():\n divided_result = divde_and_return_number(number)\n num_1_result = print_and_return_number(divided_result.outputs["quotient"])\n num_2_result = print_and_return_number(divided_result.outputs["remainder"])\n')),(0,a.kt)("p",null,"Store the result of ",(0,a.kt)("inlineCode",{parentName:"p"},"divide_and_return_number")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"divided_result")," and you can get the values of each by ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["quotient"]')," and ",(0,a.kt)("inlineCode",{parentName:"p"},'divided_result.outputs["remainder"]'),"."),(0,a.kt)("h3",{id:"write-to-python-code"},"Write to python code"),(0,a.kt)("p",null,"Now, let's get back to the main topic and pass the result of these two values to ",(0,a.kt)("inlineCode",{parentName:"p"},"sum_and_print_numbers"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline():\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("p",null,"Next, gather the necessary Configs for each component and define it as a pipeline Config."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},"def example_pipeline(number_1: int, number_2:int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n")),(0,a.kt)("h2",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,a.kt)("p",null,"Finally, convert it into a format that can be used in Kubeflow. 
The conversion can be done using the ",(0,a.kt)("inlineCode",{parentName:"p"},"kfp.dsl.pipeline")," function."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n')),(0,a.kt)("p",null,"In order to run a pipeline in Kubeflow, it needs to be compiled into the designated yaml format as only yaml format is possible, so the created pipeline needs to be compiled into a specific yaml format.\nCompilation can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'if __name__ == "__main__":\n import kfp\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("h2",{id:"conclusion"},"Conclusion"),(0,a.kt)("p",null,"As explained earlier, if we gather the content into a Python code, it will look like this."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The compiled result is as follows."),(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-05T13:38:51.566777\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: 
\'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', 
pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":\n "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(str(int_value), str(type(int_value))))\\n return\n str(int_value)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Print\n and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return 
number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":\n "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf \\"%s\\"\n \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n", "def\n sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n'))))}_.isMDXComponent=!0},9737:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/pipeline-0-c62220ce65ed4a187b70947bccb0f1e6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/1aa635cc.5ce763b5.js b/en/assets/js/1aa635cc.4f8e1195.js similarity index 99% rename from en/assets/js/1aa635cc.5ce763b5.js rename to en/assets/js/1aa635cc.4f8e1195.js index d689fada..8bcc58a1 100644 --- a/en/assets/js/1aa635cc.5ce763b5.js +++ b/en/assets/js/1aa635cc.4f8e1195.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7986],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>g});var n=t(7294);function l(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return 
Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function o(e){for(var a=1;a=0||(l[t]=e[t]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):o(o({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},m=n.forwardRef((function(e,a){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(t),m=l,g=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return t?n.createElement(g,o(o({ref:a},c),{},{components:t})):n.createElement(g,o({ref:a},c))}));function g(e,a){var t=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=m;var s={};for(var i in a)hasOwnProperty.call(a,i)&&(s[i]=a[i]);s.originalType=e,s[d]="string"==typeof e?e:l,o[1]=s;for(var p=2;p{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=t(7462),l=(t(7294),t(3905));const r={title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},o=void 0,s={unversionedId:"appendix/metallb",id:"version-1.0/appendix/metallb",title:"2. Install load balancer metallb for Bare Metal Cluster",description:"What is MetalLB?",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/en/docs/1.0/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/metallb.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Install Python virtual environment",permalink:"/en/docs/1.0/appendix/pyenv"},next:{title:"Further Readings",permalink:"/en/docs/1.0/further-readings/info"}},i={},p=[{value:"What is MetalLB?",id:"what-is-metallb",level:2},{value:"Installing MetalLB",id:"installing-metallb",level:2},{value:"Requirements",id:"requirements",level:2},{value:"MetalLB Installation",id:"metallb-installation",level:3},{value:"Preparation",id:"preparation",level:4},{value:"Installation - Manifest",id:"installation---manifest",level:3},{value:"1. Install MetalLB.",id:"1-install-metallb",level:4},{value:"2. 
Check installation.",id:"2-check-installation",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"Using MetalLB",id:"using-metallb",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},d="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(d,(0,n.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"what-is-metallb"},"What is MetalLB?"),(0,l.kt)("h2",{id:"installing-metallb"},"Installing MetalLB"),(0,l.kt)("p",null,"When using Kubernetes on cloud platforms such as AWS, GCP, and Azure, they provide their own load balancers. However, for on-premises clusters, an additional module needs to be installed to enable load balancing. ",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB")," is an open-source project that provides a load balancer for bare metal environments."),(0,l.kt)("h2",{id:"requirements"},"Requirements"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Requirement"),(0,l.kt)("th",{parentName:"tr",align:null},"Version and Details"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"Version >= v1.13.0 without built-in load balancing")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"Compatible Network CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 addresses"),(0,l.kt)("td",{parentName:"tr",align:null},"Used for MetalLB deployment")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP mode"),(0,l.kt)("td",{parentName:"tr",align:null},"One or more routers that support BGP functionality")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"TCP/UDP port 7946 open between nodes"),(0,l.kt)("td",{parentName:"tr",align:null},"Memberlist requirement")))),(0,l.kt)("h3",{id:"metallb-installation"},"MetalLB Installation"),(0,l.kt)("h4",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"If you are using kube-proxy in IPVS mode, starting from Kubernetes v1.14.2, you need to enable strict ARP mode.",(0,l.kt)("br",{parentName:"p"}),"\n","By default, Kube-router enables strict ARP, so this feature is not required if you are using Kube-router as a service proxy.",(0,l.kt)("br",{parentName:"p"}),"\n","Before applying strict ARP mode, check the current mode."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"If strictARP: false is outputted, run the following to change it to strictARP: true.\n(If strictARP: true is already outputted, you do not need to execute 
the following command)."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"If performed normally, it will be output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"installation---manifest"},"Installation - Manifest"),(0,l.kt)("h4",{id:"1-install-metallb"},"1. Install MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-check-installation"},"2. Check installation."),(0,l.kt)("p",null,"Wait until both pods in the metallb-system namespace are Running."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"When everthing is Running, similar results will be output."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"The components of the manifest are as follows:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a deployment, responsible for assigning external IP addresses for load balancing."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a daemonset, responsible for configuring network communication to connect external traffic and services.")))),(0,l.kt)("p",null,"The service includes RBAC permissions which are necessary for the controller and speaker components to operate."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"Setting up the load balancing policy of MetalLB can be done by deploying a configmap containing the related configuration information."),(0,l.kt)("p",null,"There are two modes that can be configured in MetalLB:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 Mode")," "),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP Mode")," ")),(0,l.kt)("p",null,"Here we will proceed with Layer 2 mode."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"In the Layer 2 mode, it is enough to set only the range of IP addresses to be used simply.",(0,l.kt)("br",{parentName:"p"}),"\n","When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local 
network and provides the computer's MAC address to the client."),(0,l.kt)("p",null,"The following ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," file is the configuration for MetalLB to provide control over the IP range of 192.168.35.100 ~ 192.168.35.110, and to configure Layer 2 mode."),(0,l.kt)("p",null,"In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"Apply the above settings."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"If deployed normally, it will output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"using-metallb"},"Using MetalLB"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"First, before getting the load-balancing feature from MetalLB, check the current status by changing the type of the istio-ingressgateway service in the istio-system namespace to ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer")," to provide the Kubeflow Dashboard."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input a desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"If you check again, you will see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," to verify the following screen is output."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:t(41).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of minio-service, which provides the Dashboard of minio, in the kubeflow namespace to LoadBalancer to receive the load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item. If you do not add, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," to confirm the following screen is printed. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:t(4870).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of mlflow-server-service service in the mlflow-system namespace that provides the mlflow Dashboard to LoadBalancer to receive load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Open the web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," to confirm the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:t(2786).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"First, check the current status before changing the type of seldon-core-analytics-grafana service in the seldon-system namespace which provides Grafana's Dashboard to receive Load Balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"The type of the corresponding service is ClusterIP, and you can see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If not, an IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If you check again, you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Open the Web Browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," to confirm that the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:t(1417).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},1417:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},41:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},4870:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},2786:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7986],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>g});var n=t(7294);function l(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return 
Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function o(e){for(var a=1;a=0||(l[t]=e[t]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):o(o({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},m=n.forwardRef((function(e,a){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(t),m=l,g=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return t?n.createElement(g,o(o({ref:a},c),{},{components:t})):n.createElement(g,o({ref:a},c))}));function g(e,a){var t=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=m;var s={};for(var i in a)hasOwnProperty.call(a,i)&&(s[i]=a[i]);s.originalType=e,s[d]="string"==typeof e?e:l,o[1]=s;for(var p=2;p{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=t(7462),l=(t(7294),t(3905));const r={title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},o=void 0,s={unversionedId:"appendix/metallb",id:"version-1.0/appendix/metallb",title:"2. Install load balancer metallb for Bare Metal Cluster",description:"What is MetalLB?",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/en/docs/1.0/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/metallb.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Install Python virtual environment",permalink:"/en/docs/1.0/appendix/pyenv"},next:{title:"Further Readings",permalink:"/en/docs/1.0/further-readings/info"}},i={},p=[{value:"What is MetalLB?",id:"what-is-metallb",level:2},{value:"Installing MetalLB",id:"installing-metallb",level:2},{value:"Requirements",id:"requirements",level:2},{value:"MetalLB Installation",id:"metallb-installation",level:3},{value:"Preparation",id:"preparation",level:4},{value:"Installation - Manifest",id:"installation---manifest",level:3},{value:"1. Install MetalLB.",id:"1-install-metallb",level:4},{value:"2. 
Check installation.",id:"2-check-installation",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"Using MetalLB",id:"using-metallb",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},d="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(d,(0,n.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"what-is-metallb"},"What is MetalLB?"),(0,l.kt)("h2",{id:"installing-metallb"},"Installing MetalLB"),(0,l.kt)("p",null,"When using Kubernetes on cloud platforms such as AWS, GCP, and Azure, they provide their own load balancers. However, for on-premises clusters, an additional module needs to be installed to enable load balancing. ",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB")," is an open-source project that provides a load balancer for bare metal environments."),(0,l.kt)("h2",{id:"requirements"},"Requirements"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Requirement"),(0,l.kt)("th",{parentName:"tr",align:null},"Version and Details"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"Version >= v1.13.0 without built-in load balancing")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"Compatible Network CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 addresses"),(0,l.kt)("td",{parentName:"tr",align:null},"Used for MetalLB deployment")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP mode"),(0,l.kt)("td",{parentName:"tr",align:null},"One or more routers that support BGP functionality")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"TCP/UDP port 7946 open between nodes"),(0,l.kt)("td",{parentName:"tr",align:null},"Memberlist requirement")))),(0,l.kt)("h3",{id:"metallb-installation"},"MetalLB Installation"),(0,l.kt)("h4",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"If you are using kube-proxy in IPVS mode, starting from Kubernetes v1.14.2, you need to enable strict ARP mode.",(0,l.kt)("br",{parentName:"p"}),"\n","By default, Kube-router enables strict ARP, so this feature is not required if you are using Kube-router as a service proxy.",(0,l.kt)("br",{parentName:"p"}),"\n","Before applying strict ARP mode, check the current mode."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"If strictARP: false is outputted, run the following to change it to strictARP: true.\n(If strictARP: true is already outputted, you do not need to execute 
the following command)."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"If performed normally, it will be output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"installation---manifest"},"Installation - Manifest"),(0,l.kt)("h4",{id:"1-install-metallb"},"1. Install MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-check-installation"},"2. Check installation."),(0,l.kt)("p",null,"Wait until both pods in the metallb-system namespace are Running."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"When everthing is Running, similar results will be output."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"The components of the manifest are as follows:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a deployment, responsible for assigning external IP addresses for load balancing."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a daemonset, responsible for configuring network communication to connect external traffic and services.")))),(0,l.kt)("p",null,"The service includes RBAC permissions which are necessary for the controller and speaker components to operate."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"Setting up the load balancing policy of MetalLB can be done by deploying a configmap containing the related configuration information."),(0,l.kt)("p",null,"There are two modes that can be configured in MetalLB:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 Mode")," "),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP Mode")," ")),(0,l.kt)("p",null,"Here we will proceed with Layer 2 mode."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"In the Layer 2 mode, it is enough to set only the range of IP addresses to be used simply.",(0,l.kt)("br",{parentName:"p"}),"\n","When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local 
network and provides the computer's MAC address to the client."),(0,l.kt)("p",null,"The following ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," file is the configuration for MetalLB to provide control over the IP range of 192.168.35.100 ~ 192.168.35.110, and to configure Layer 2 mode."),(0,l.kt)("p",null,"In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"Apply the above settings."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"If deployed normally, it will output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"using-metallb"},"Using MetalLB"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"First, before getting the load-balancing feature from MetalLB, check the current status by changing the type of the istio-ingressgateway service in the istio-system namespace to ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer")," to provide the Kubeflow Dashboard."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input a desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"If you check again, you will see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," to verify the following screen is output."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:t(41).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of minio-service, which provides the Dashboard of minio, in the kubeflow namespace to LoadBalancer to receive the load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item. If you do not add, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," to confirm the following screen is printed. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:t(4870).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of mlflow-server-service service in the mlflow-system namespace that provides the mlflow Dashboard to LoadBalancer to receive load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Open the web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," to confirm the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:t(2786).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"First, check the current status before changing the type of seldon-core-analytics-grafana service in the seldon-system namespace which provides Grafana's Dashboard to receive Load Balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"The type of the corresponding service is ClusterIP, and you can see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If not, an IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If you check again, you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Open the Web Browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," to confirm that the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:t(1417).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},1417:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},41:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},4870:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},2786:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/1d540fc8.639ff8f3.js b/en/assets/js/1d540fc8.92b76139.js similarity index 99% rename from en/assets/js/1d540fc8.639ff8f3.js rename to en/assets/js/1d540fc8.92b76139.js index 007c44eb..bec05fa9 100644 --- a/en/assets/js/1d540fc8.639ff8f3.js +++ b/en/assets/js/1d540fc8.92b76139.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5371],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>k});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=c(r),m=o,k=p["".concat(l,".").concat(m)]||p[m]||d[m]||a;return r?n.createElement(k,i(i({ref:t},u),{},{components:r})):n.createElement(k,i({ref:t},u))}));function k(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/docker",id:"prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/en/docs/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/docker.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/en/docs/prerequisites/docker/command"}},l={},c=[{value:"Container",id:"container",level:2},{value:"Docker",id:"docker",level:2},{value:"Interpretation of Layer",id:"interpretation-of-layer",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Containerization:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A technology that allows applications to be executed uniformly anywhere."))),(0,o.kt)("li",{parentName:"ul"},"Container Image:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A collection of all the files required to run 
an application."),(0,o.kt)("li",{parentName:"ul"},"\u2192 Similar to a mold for making fish-shaped bread (Bungeoppang)."))),(0,o.kt)("li",{parentName:"ul"},"Container:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A single process that is executed based on a container image."),(0,o.kt)("li",{parentName:"ul"},"\u2192 A fish-shaped bread (Bungeoppang) produced using a mold.")))),(0,o.kt)("h2",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker is a platform that allows you to manage and use containers.",(0,o.kt)("br",{parentName:"p"}),"\n",'Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.'),(0,o.kt)("p",null,"In the Docker, the resources for the container are separated and the lifecycle is controlled by Linux kernel's cgroups, etc.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it is too difficult to use these interfaces directly, so an abstraction layer is created."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(3231).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"Through this, users can easily control containers with just the user-friendly API ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Users can easily control containers using the user-friendly API called ",(0,o.kt)("strong",{parentName:"li"},"Docker CLI"),".")),(0,o.kt)("h2",{id:"interpretation-of-layer"},"Interpretation of Layer"),(0,o.kt)("p",null,"The roles of the layers mentioned above are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process."),(0,o.kt)("li",{parentName:"ol"},"containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI)."),(0,o.kt)("li",{parentName:"ol"},"dockerd: Solely responsible for issuing commands to containerd."),(0,o.kt)("li",{parentName:"ol"},"Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},'During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.')))),(0,o.kt)("p",null,'Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.',(0,o.kt)("br",{parentName:"p"}),"\n",'In the upcoming text, the term "Docker" may be used in various contexts.'),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"ML engineers use Docker for the following reasons:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python packages."),(0,o.kt)("li",{parentName:"ol"},"Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. 
Containerization technology enables this."),(0,o.kt)("li",{parentName:"ol"},"Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.")))}d.isMDXComponent=!0},3231:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5371],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>k});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=c(r),m=o,k=p["".concat(l,".").concat(m)]||p[m]||d[m]||a;return r?n.createElement(k,i(i({ref:t},u),{},{components:r})):n.createElement(k,i({ref:t},u))}));function k(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/docker",id:"prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/en/docs/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/docker.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/en/docs/prerequisites/docker/command"}},l={},c=[{value:"Container",id:"container",level:2},{value:"Docker",id:"docker",level:2},{value:"Interpretation of Layer",id:"interpretation-of-layer",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:c},p="wrapper";function 
d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Containerization:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A technology that allows applications to be executed uniformly anywhere."))),(0,o.kt)("li",{parentName:"ul"},"Container Image:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A collection of all the files required to run an application."),(0,o.kt)("li",{parentName:"ul"},"\u2192 Similar to a mold for making fish-shaped bread (Bungeoppang)."))),(0,o.kt)("li",{parentName:"ul"},"Container:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A single process that is executed based on a container image."),(0,o.kt)("li",{parentName:"ul"},"\u2192 A fish-shaped bread (Bungeoppang) produced using a mold.")))),(0,o.kt)("h2",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker is a platform that allows you to manage and use containers.",(0,o.kt)("br",{parentName:"p"}),"\n",'Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.'),(0,o.kt)("p",null,"In the Docker, the resources for the container are separated and the lifecycle is controlled by Linux kernel's cgroups, etc.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it is too difficult to use these interfaces directly, so an abstraction layer is created."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(3231).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"Through this, users can easily control containers with just the user-friendly API ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Users can easily control containers using the user-friendly API called ",(0,o.kt)("strong",{parentName:"li"},"Docker CLI"),".")),(0,o.kt)("h2",{id:"interpretation-of-layer"},"Interpretation of Layer"),(0,o.kt)("p",null,"The roles of the layers mentioned above are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process."),(0,o.kt)("li",{parentName:"ol"},"containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI)."),(0,o.kt)("li",{parentName:"ol"},"dockerd: Solely responsible for issuing commands to containerd."),(0,o.kt)("li",{parentName:"ol"},"Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},'During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.')))),(0,o.kt)("p",null,'Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.',(0,o.kt)("br",{parentName:"p"}),"\n",'In the upcoming text, the term "Docker" may be used in various contexts.'),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"ML engineers use Docker for the following reasons:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python 
packages."),(0,o.kt)("li",{parentName:"ol"},"Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. Containerization technology enables this."),(0,o.kt)("li",{parentName:"ol"},"Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.")))}d.isMDXComponent=!0},3231:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file diff --git a/assets/js/1df93b7f.c4fb40a2.js b/en/assets/js/1df93b7f.21501263.js similarity index 95% rename from assets/js/1df93b7f.c4fb40a2.js rename to en/assets/js/1df93b7f.21501263.js index 471c3225..365a2e48 100644 --- a/assets/js/1df93b7f.c4fb40a2.js +++ b/en/assets/js/1df93b7f.21501263.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3237],{9722:(e,t,a)=>{a.d(t,{Z:()=>c});var l,r=a(7294);function n(){return n=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...c}=e;return r.createElement("svg",n({xmlns:"http://www.w3.org/2000/svg",width:1088,height:687.962,viewBox:"0 0 1088 687.962","aria-labelledby":a},c),void 0===t?r.createElement("title",{id:a},"Easy to Use"):t?r.createElement("title",{id:a},t):null,l||(l=r.createElement("g",{"data-name":"Group 12"},r.createElement("g",{"data-name":"Group 11"},r.createElement("path",{"data-name":"Path 83",d:"M961.81 454.442c-5.27 45.15-16.22 81.4-31.25 110.31-20 38.52-54.21 54.04-84.77 70.28a193.275 193.275 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.282 657.282 0 0 0-104.09-13.16q-14.97-.675-29.97-.67c-15.42.02-293.07 5.29-360.67-131.57-16.69-33.76-28.13-75-32.24-125.27-11.63-142.12 52.29-235.46 134.74-296.47 155.97-115.41 369.76-110.57 523.43 7.88 102.36 78.9 198.2 198.31 179.02 362.74Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 84",d:"M930.56 564.752c-20 38.52-47.21 64.04-77.77 80.28a193.272 193.272 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.3 657.3 0 0 0-104.09-13.16q-14.97-.675-29.97-.67-23.13.03-46.25 1.72c-100.17 7.36-253.82-6.43-321.42-143.29L326 177.962l62.95 161.619 20.09 51.59 55.37-75.98L493 275.962l130.2 149.27 36.8-81.27 254.78 207.919 14.21 11.59Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 85",d:"m302 282.962 26-57 36 83-31-60Z",opacity:.1}),r.createElement("path",{"data-name":"Path 86",d:"M554.5 647.802q-14.97-.675-29.97-.67l-115.49-255.96Z",opacity:.1}),r.createElement("path",{"data-name":"Path 87",d:"M464.411 315.191 493 292.962l130 150-132-128Z",opacity:.1}),r.createElement("path",{"data-name":"Path 88",d:"M852.79 645.032a193.265 193.265 0 0 1-27.46 11.94L623.2 425.232Z",opacity:.1}),r.createElement("circle",{"data-name":"Ellipse 11",cx:3,cy:3,r:3,transform:"translate(479 98.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 12",cx:3,cy:3,r:3,transform:"translate(396 201.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 13",cx:2,cy:2,r:2,transform:"translate(600 220.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 14",cx:2,cy:2,r:2,transform:"translate(180 265.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 15",cx:2,cy:2,r:2,transform:"translate(612 96.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 16",cx:2,cy:2,r:2,transform:"translate(736 192.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 
17",cx:2,cy:2,r:2,transform:"translate(858 344.962)",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 89",d:"M306 121.222h-2.76v-2.76h-1.48v2.76H299v1.478h2.76v2.759h1.48V122.7H306Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 90",d:"M848 424.222h-2.76v-2.76h-1.48v2.76H841v1.478h2.76v2.759h1.48V425.7H848Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 91",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 92",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",opacity:.1}),r.createElement("ellipse",{"data-name":"Ellipse 18",cx:544,cy:30,rx:544,ry:30,transform:"translate(0 583.962)",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 93",d:"M568 571.962c0 33.137-14.775 24-33 24s-33 9.137-33-24 33-96 33-96 33 62.863 33 96Z",fill:"#ff6584"}),r.createElement("path",{"data-name":"Path 94",d:"M550 584.641c0 15.062-6.716 10.909-15 10.909s-15 4.153-15-10.909 15-43.636 15-43.636 15 28.576 15 43.636Z",opacity:.1}),r.createElement("rect",{"data-name":"Rectangle 97",width:92,height:18,rx:9,transform:"translate(489 604.962)",fill:"#2f2e41"}),r.createElement("rect",{"data-name":"Rectangle 98",width:92,height:18,rx:9,transform:"translate(489 586.962)",fill:"#2f2e41"}),r.createElement("path",{"data-name":"Path 95",d:"M137 490.528c0 55.343 34.719 100.126 77.626 100.126",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 96",d:"M214.626 590.654c0-55.965 38.745-101.251 86.626-101.251",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 97",d:"M165.125 495.545c0 52.57 22.14 95.109 49.5 95.109",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 98",d:"M214.626 590.654c0-71.511 44.783-129.377 100.126-129.377",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 99",d:"M198.3 591.36s11.009-.339 14.326-2.7 16.934-5.183 17.757-1.395 16.544 18.844 4.115 18.945-28.879-1.936-32.19-3.953-4.008-10.897-4.008-10.897Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 100",d:"M234.716 604.89c-12.429.1-28.879-1.936-32.19-3.953-2.522-1.536-3.527-7.048-3.863-9.591l-.368.014s.7 8.879 4.009 10.9 19.761 4.053 32.19 3.953c3.588-.029 4.827-1.305 4.759-3.2-.498 1.142-1.867 1.855-4.537 1.877Z",opacity:.2}),r.createElement("path",{"data-name":"Path 101",d:"M721.429 527.062c0 38.029 23.857 68.8 53.341 68.8",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 102",d:"M774.769 595.863c0-38.456 26.623-69.575 59.525-69.575",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 103",d:"M740.755 530.509c0 36.124 15.213 65.354 34.014 65.354",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 104",d:"M774.769 595.863c0-49.139 30.773-88.9 68.8-88.9",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 105",d:"M763.548 596.348s7.565-.233 9.844-1.856 11.636-3.562 12.2-.958 11.368 12.949 2.828 13.018-19.844-1.33-22.119-2.716-2.753-7.488-2.753-7.488Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 106",d:"M788.574 605.645c-8.54.069-19.844-1.33-22.119-2.716-1.733-1.056-2.423-4.843-2.654-6.59l-.253.01s.479 6.1 2.755 7.487 13.579 2.785 22.119 2.716c2.465-.02 3.317-.9 3.27-2.2-.343.788-1.283 1.278-3.118 1.293Z",opacity:.2}),r.createElement("path",{"data-name":"Path 107",d:"M893.813 618.699s11.36-1.729 14.5-4.591 16.89-7.488 18.217-3.667 19.494 17.447 6.633 19.107-30.153 1.609-33.835-.065-5.515-10.784-5.515-10.784Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 
108",d:"M933.228 628.154c-12.86 1.659-30.153 1.609-33.835-.065-2.8-1.275-4.535-6.858-5.2-9.45l-.379.061s1.833 9.109 5.516 10.783 20.975 1.725 33.835.065c3.712-.479 4.836-1.956 4.529-3.906-.375 1.246-1.703 2.156-4.466 2.512Z",opacity:.2}),r.createElement("path",{"data-name":"Path 109",d:"M614.26 617.881s9.587-1.459 12.237-3.875 14.255-6.32 15.374-3.095 16.452 14.725 5.6 16.125-25.448 1.358-28.555-.055-4.656-9.1-4.656-9.1Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 110",d:"M647.524 625.856c-10.853 1.4-25.448 1.358-28.555-.055-2.367-1.076-3.827-5.788-4.39-7.976l-.32.051s1.547 7.687 4.655 9.1 17.7 1.456 28.555.055c3.133-.4 4.081-1.651 3.822-3.3-.314 1.057-1.435 1.825-3.767 2.125Z",opacity:.2}),r.createElement("path",{"data-name":"Path 111",d:"M122.389 613.09s7.463-1.136 9.527-3.016 11.1-4.92 11.969-2.409 12.808 11.463 4.358 12.553-19.811 1.057-22.23-.043-3.624-7.085-3.624-7.085Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 112",d:"M148.285 619.302c-8.449 1.09-19.811 1.057-22.23-.043-1.842-.838-2.979-4.506-3.417-6.209l-.249.04s1.2 5.984 3.624 7.085 13.781 1.133 22.23.043c2.439-.315 3.177-1.285 2.976-2.566-.246.818-1.119 1.416-2.934 1.65Z",opacity:.2}),r.createElement("path",{"data-name":"Path 113",d:"M383.7 601.318c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.118-36.793 93.694-36.793 93.08 6.573 93.08 36.793Z",opacity:.1}),r.createElement("path",{"data-name":"Path 114",d:"M383.7 593.881c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.114-36.8 93.69-36.8 93.084 6.576 93.084 36.8Z",fill:"#3f3d56"})),r.createElement("path",{"data-name":"Path 40",d:"M360.175 475.732h91.791v37.153h-91.791Z",fill:"#fff",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 41",d:"M277.126 597.026a21.828 21.828 0 0 1-18.908-10.927 21.829 21.829 0 0 0 18.908 32.782h21.855v-21.855Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 42",d:"m375.451 481.607 76.514-4.782v-10.928a21.854 21.854 0 0 0-21.855-21.855h-98.347l-2.732-4.735a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.732-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.731-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.735h-.071l-4.526-4.525a3.153 3.153 0 0 0-5.276 1.414l-1.5 5.577-5.674-1.521a3.154 3.154 0 0 0-3.863 3.864l1.52 5.679-5.575 1.494a3.155 3.155 0 0 0-1.416 5.278l4.526 4.526v.07l-4.735 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.727a3.154 3.154 0 0 0 0 5.464l4.735 2.736-4.735 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.735a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.728a3.154 3.154 0 0 0 0 5.464l4.732 2.732a21.854 21.854 0 0 0 21.858 21.855h131.13a21.854 21.854 0 0 0 21.855-21.855v-87.42l-76.514-4.782a11.632 11.632 0 0 1 0-23.219",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 43",d:"M408.255 618.882h32.782v-43.71h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 44",d:"M462.893 591.563a5.438 5.438 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 
21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 45",d:"M419.183 553.317h32.782v-21.855h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 46",d:"M462.893 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.814 2.814 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 47",d:"M320.836 479.556a2.732 2.732 0 0 1-2.732-2.732 8.2 8.2 0 0 0-16.391 0 2.732 2.732 0 0 1-5.464 0 13.66 13.66 0 0 1 27.319 0 2.732 2.732 0 0 1-2.732 2.732",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 48",d:"M364.546 618.881h65.565a21.854 21.854 0 0 0 21.855-21.855v-76.492h-65.565a21.854 21.854 0 0 0-21.855 21.855Z",fill:"#ffff50",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 49",d:"M435.596 554.41h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0-54.434h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.652h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m16.369-100.959c-.013 0-.024-.007-.037-.005-3.377.115-4.974 3.492-6.384 6.472-1.471 3.114-2.608 5.139-4.473 5.078-2.064-.074-3.244-2.406-4.494-4.874-1.436-2.835-3.075-6.049-6.516-5.929-3.329.114-4.932 3.053-6.346 5.646-1.5 2.762-2.529 4.442-4.5 4.364-2.106-.076-3.225-1.972-4.52-4.167-1.444-2.443-3.112-5.191-6.487-5.1-3.272.113-4.879 2.606-6.3 4.808-1.5 2.328-2.552 3.746-4.551 3.662-2.156-.076-3.27-1.65-4.558-3.472-1.447-2.047-3.077-4.363-6.442-4.251-3.2.109-4.807 2.153-6.224 3.954-1.346 1.709-2.4 3.062-4.621 2.977a1.094 1.094 0 0 0-.079 2.186c3.3.11 4.967-1.967 6.417-3.81 1.286-1.635 2.4-3.045 4.582-3.12 2.1-.09 3.091 1.218 4.584 3.327 1.417 2 3.026 4.277 6.263 4.394 3.391.114 5.022-2.42 6.467-4.663 1.292-2 2.406-3.734 4.535-3.807 1.959-.073 3.026 1.475 4.529 4.022 1.417 2.4 3.023 5.121 6.324 5.241 3.415.118 5.064-2.863 6.5-5.5 1.245-2.282 2.419-4.437 4.5-4.509 1.959-.046 2.981 1.743 4.492 4.732 1.412 2.79 3.013 5.95 6.365 6.071h.185c3.348 0 4.937-3.36 6.343-6.331 1.245-2.634 2.423-5.114 4.444-5.216Z",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 50",d:"M342.691 618.882h43.71v-43.71h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 8",transform:"rotate(-14.98 2188.845 -1120.376)"},r.createElement("rect",{"data-name":"Rectangle 3",width:92.361,height:36.462,rx:2,fill:"#d8d8d8"}),r.createElement("g",{"data-name":"Group 2",transform:"translate(1.531 23.03)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 4",width:5.336,height:5.336,rx:1,transform:"translate(16.797)"}),r.createElement("rect",{"data-name":"Rectangle 5",width:5.336,height:5.336,rx:1,transform:"translate(23.12)"}),r.createElement("rect",{"data-name":"Rectangle 6",width:5.336,height:5.336,rx:1,transform:"translate(29.444)"}),r.createElement("rect",{"data-name":"Rectangle 
7",width:5.336,height:5.336,rx:1,transform:"translate(35.768)"}),r.createElement("rect",{"data-name":"Rectangle 8",width:5.336,height:5.336,rx:1,transform:"translate(42.091)"}),r.createElement("rect",{"data-name":"Rectangle 9",width:5.336,height:5.336,rx:1,transform:"translate(48.415)"}),r.createElement("rect",{"data-name":"Rectangle 10",width:5.336,height:5.336,rx:1,transform:"translate(54.739)"}),r.createElement("rect",{"data-name":"Rectangle 11",width:5.336,height:5.336,rx:1,transform:"translate(61.063)"}),r.createElement("rect",{"data-name":"Rectangle 12",width:5.336,height:5.336,rx:1,transform:"translate(67.386)"}),r.createElement("path",{"data-name":"Path 51",d:"M1.093 0h13.425a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0ZM75 0h13.426a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H75a1.093 1.093 0 0 1-1.093-1.093v-3.15A1.093 1.093 0 0 1 75 0Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 3",transform:"translate(1.531 10.261)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 52",d:"M1.093 0h5.125A1.093 1.093 0 0 1 7.31 1.093v3.149a1.093 1.093 0 0 1-1.092 1.093H1.093A1.093 1.093 0 0 1 0 4.242V1.093A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 13",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 14",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 15",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 16",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 17",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 18",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 19",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 20",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 21",width:5.336,height:5.336,rx:1,transform:"translate(58.888)"}),r.createElement("rect",{"data-name":"Rectangle 22",width:5.336,height:5.336,rx:1,transform:"translate(65.212)"}),r.createElement("rect",{"data-name":"Rectangle 23",width:5.336,height:5.336,rx:1,transform:"translate(71.536)"}),r.createElement("rect",{"data-name":"Rectangle 24",width:5.336,height:5.336,rx:1,transform:"translate(77.859)"}),r.createElement("rect",{"data-name":"Rectangle 25",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 4",transform:"rotate(180 45.525 4.773)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 53",d:"M1.093 0h5.126a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 26",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 27",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 28",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 
29",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 30",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 31",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 32",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 33",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 34",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 35",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 36",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 37",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 38",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("rect",{"data-name":"Rectangle 39",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 40",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 41",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 42",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 43",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 44",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 45",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 46",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 47",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 48",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 49",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 50",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 51",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 6",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 54",d:"M2.624 16.584h7.3a1.093 1.093 0 0 1 1.092 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093h-7.3a1.093 1.093 0 0 1-1.092-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 5",transform:"translate(12.202 16.584)"},r.createElement("rect",{"data-name":"Rectangle 52",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 53",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 54",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 55",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("rect",{"data-name":"Rectangle 56",width:5.336,height:5.336,rx:1,transform:"translate(25.295)"}),r.createElement("rect",{"data-name":"Rectangle 
57",width:5.336,height:5.336,rx:1,transform:"translate(31.619)"}),r.createElement("rect",{"data-name":"Rectangle 58",width:5.336,height:5.336,rx:1,transform:"translate(37.942)"}),r.createElement("rect",{"data-name":"Rectangle 59",width:5.336,height:5.336,rx:1,transform:"translate(44.265)"}),r.createElement("rect",{"data-name":"Rectangle 60",width:5.336,height:5.336,rx:1,transform:"translate(50.589)"}),r.createElement("rect",{"data-name":"Rectangle 61",width:5.336,height:5.336,rx:1,transform:"translate(56.912)"}),r.createElement("rect",{"data-name":"Rectangle 62",width:5.336,height:5.336,rx:1,transform:"translate(63.236)"})),r.createElement("path",{"data-name":"Path 55",d:"M83.053 16.584h6.906a1.093 1.093 0 0 1 1.091 1.093v3.15a1.093 1.093 0 0 1-1.091 1.093h-6.907a1.093 1.093 0 0 1-1.093-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 7",transform:"translate(1.531 29.627)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 63",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 64",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 65",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 66",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("path",{"data-name":"Path 56",d:"M26.387 0h30.422a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093H26.387a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 26.387 0Zm33.594 0h3.942a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093h-3.942a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 59.981 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 67",width:5.336,height:5.336,rx:1,transform:"translate(66.003)"}),r.createElement("rect",{"data-name":"Rectangle 68",width:5.336,height:5.336,rx:1,transform:"translate(72.327)"}),r.createElement("rect",{"data-name":"Rectangle 69",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("path",{"data-name":"Path 57",d:"M78.254 2.273v-1.18A1.093 1.093 0 0 1 79.347 0h3.15a1.093 1.093 0 0 1 1.093 1.093v1.18Z"}),r.createElement("path",{"data-name":"Path 58",d:"M83.591 3.063v1.18a1.093 1.093 0 0 1-1.093 1.093h-3.15a1.093 1.093 0 0 1-1.093-1.093v-1.18Z"})),r.createElement("rect",{"data-name":"Rectangle 70",width:88.927,height:2.371,rx:1.085,transform:"translate(1.925 1.17)",fill:"#4a4a4a"}),r.createElement("rect",{"data-name":"Rectangle 71",width:4.986,height:1.581,rx:.723,transform:"translate(4.1 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 72",width:4.986,height:1.581,rx:.723,transform:"translate(10.923 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 73",width:4.986,height:1.581,rx:.723,transform:"translate(16.173 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 74",width:4.986,height:1.581,rx:.723,transform:"translate(21.421 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 75",width:4.986,height:1.581,rx:.723,transform:"translate(26.671 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 76",width:4.986,height:1.581,rx:.723,transform:"translate(33.232 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 77",width:4.986,height:1.581,rx:.723,transform:"translate(38.48 
1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 78",width:4.986,height:1.581,rx:.723,transform:"translate(43.73 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 79",width:4.986,height:1.581,rx:.723,transform:"translate(48.978 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 80",width:4.986,height:1.581,rx:.723,transform:"translate(55.54 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 81",width:4.986,height:1.581,rx:.723,transform:"translate(60.788 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 82",width:4.986,height:1.581,rx:.723,transform:"translate(66.038 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 83",width:4.986,height:1.581,rx:.723,transform:"translate(72.599 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 84",width:4.986,height:1.581,rx:.723,transform:"translate(77.847 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 85",width:4.986,height:1.581,rx:.723,transform:"translate(83.097 1.566)",fill:"#d8d8d8",opacity:.136})),r.createElement("path",{"data-name":"Path 59",d:"M408.256 591.563a5.439 5.439 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 60",d:"M342.691 553.317h43.71v-21.855h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 61",d:"M397.328 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.811 2.811 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 62",d:"M408.256 464.531a2.967 2.967 0 0 1-.535-.055 2.754 2.754 0 0 1-.514-.153 2.838 2.838 0 0 1-.471-.251 4.139 4.139 0 0 1-.415-.339 3.2 3.2 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.517 2.968 2.968 0 0 1 .055-.535 3.152 3.152 0 0 1 .152-.514 2.874 2.874 0 0 1 .252-.47 2.633 2.633 0 0 1 .753-.754 2.837 2.837 0 0 1 .471-.251 2.753 2.753 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 4.019 4.019 0 0 1 .339.415 2.786 2.786 0 0 1 .251.47 2.864 2.864 0 0 1 .208 1.049 2.77 2.77 0 0 1-.8 1.934 4.139 4.139 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459m21.855-1.366a2.789 2.789 0 0 1-1.935-.8 4.162 4.162 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.519 2.789 2.789 0 0 1 .8-1.934 4.139 4.139 0 0 1 .415-.339 2.838 2.838 0 0 1 .471-.251 2.752 2.752 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 2.79 2.79 0 0 1 .8 1.934 3.069 3.069 0 0 1-.055.535 2.779 2.779 0 0 1-.153.514 3.885 3.885 0 0 1-.251.47 4.02 4.02 0 0 1-.339.415 4.138 4.138 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459",fillRule:"evenodd"}))))}},4002:(e,t,a)=>{a.d(t,{Z:()=>F});var l,r,n,c,m,h,d,i,f,s,o,E,g,p,x,R,v,w,u,M,y,Z,P,b,A,q,H,N,k,L,O,G,V,_,S,j,B=a(7294);function 
C(){return C=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...F}=e;return B.createElement("svg",C({xmlns:"http://www.w3.org/2000/svg",width:1129,height:663,viewBox:"0 0 1129 663","aria-labelledby":a},F),void 0===t?B.createElement("title",{id:a},"Focus on What Matters"):t?B.createElement("title",{id:a},t):null,l||(l=B.createElement("circle",{cx:321,cy:321,r:321,fill:"#f2f2f2"})),r||(r=B.createElement("ellipse",{cx:559,cy:635.5,rx:514,ry:27.5,fill:"#3f3d56"})),n||(n=B.createElement("ellipse",{cx:558,cy:627,rx:460,ry:22,opacity:.2})),c||(c=B.createElement("path",{fill:"#3f3d56",d:"M131 152.5h840v50H131z"})),m||(m=B.createElement("path",{d:"M131 608.83a21.67 21.67 0 0 0 21.67 21.67h796.66A21.67 21.67 0 0 0 971 608.83V177.5H131ZM949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",fill:"#3f3d56"})),h||(h=B.createElement("path",{d:"M949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",opacity:.2})),d||(d=B.createElement("circle",{cx:181,cy:147.5,r:13,fill:"#3f3d56"})),i||(i=B.createElement("circle",{cx:217,cy:147.5,r:13,fill:"#3f3d56"})),f||(f=B.createElement("circle",{cx:253,cy:147.5,r:13,fill:"#3f3d56"})),s||(s=B.createElement("rect",{x:168,y:213.5,width:337,height:386,rx:5.335,fill:"#606060"})),o||(o=B.createElement("rect",{x:603,y:272.5,width:284,height:22,rx:5.476,fill:"#2e8555"})),E||(E=B.createElement("rect",{x:537,y:352.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),g||(g=B.createElement("rect",{x:537,y:396.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),p||(p=B.createElement("rect",{x:537,y:440.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),x||(x=B.createElement("rect",{x:537,y:484.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),R||(R=B.createElement("rect",{x:865,y:552.5,width:88,height:26,rx:7.028,fill:"#3ecc5f"})),v||(v=B.createElement("path",{d:"M1053.103 506.116a30.114 30.114 0 0 0 3.983-15.266c0-13.797-8.544-24.98-19.083-24.98s-19.082 11.183-19.082 24.98a30.114 30.114 0 0 0 3.983 15.266 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 30.114 30.114 0 0 0-3.983 15.266c0 13.797 8.543 24.981 19.082 24.981s19.083-11.184 19.083-24.98a30.114 30.114 0 0 0-3.983-15.267 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532Z",fill:"#3f3d56"})),w||(w=B.createElement("ellipse",{cx:1038.003,cy:460.318,rx:19.083,ry:24.981,fill:"#3f3d56"})),u||(u=B.createElement("ellipse",{cx:1038.003,cy:429.786,rx:19.083,ry:24.981,fill:"#3f3d56"})),M||(M=B.createElement("path",{d:"M1109.439 220.845a91.61 91.61 0 0 0 7.106-10.461l-50.14-8.235 54.228.403a91.566 91.566 0 0 0 1.746-72.426l-72.755 37.742 67.097-49.321A91.413 91.413 0 1 0 965.75 220.845a91.458 91.458 0 0 0-10.425 16.67l65.087 33.814-69.4-23.292a91.46 91.46 0 0 0 14.738 85.837 91.406 91.406 0 1 0 143.689 0 91.418 91.418 0 0 0 0-113.03Z",fill:"#3ecc5f",fillRule:"evenodd"})),y||(y=B.createElement("path",{d:"M946.188 277.36a91.013 91.013 0 0 0 19.562 56.514 91.406 91.406 0 1 0 143.689 0c12.25-15.553-163.25-66.774-163.25-56.515Z",opacity:.1})),Z||(Z=B.createElement("path",{d:"M330.12 342.936h111.474v45.12H330.12Z",fill:"#fff",fillRule:"evenodd"})),P||(P=B.createElement("path",{d:"M229.263 490.241a26.51 26.51 0 0 1-22.963-13.27 26.51 26.51 0 0 0 22.963 39.812h26.541V490.24Z",fill:"#3ecc5f",fillRule:"evenodd"})),b||(b=B.createElement("path",{d:"m348.672 350.07 92.922-5.807v-13.27a26.54 26.54 0 0 0-26.541-26.542H295.616l-3.318-5.746a3.83 3.83 0 0 0-6.635 
0l-3.318 5.746-3.317-5.746a3.83 3.83 0 0 0-6.636 0l-3.317 5.746-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746c-.03 0-.056.004-.086.004l-5.497-5.495a3.83 3.83 0 0 0-6.407 1.717l-1.817 6.773-6.89-1.847a3.83 3.83 0 0 0-4.691 4.693l1.844 6.891-6.77 1.814a3.832 3.832 0 0 0-1.72 6.41l5.497 5.497c0 .028-.004.055-.004.085l-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318a26.54 26.54 0 0 0 26.541 26.542h159.249a26.54 26.54 0 0 0 26.541-26.542V384.075l-92.922-5.807a14.126 14.126 0 0 1 0-28.197",fill:"#3ecc5f",fillRule:"evenodd"})),A||(A=B.createElement("path",{d:"M388.511 516.783h39.812V463.7h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),q||(q=B.createElement("path",{d:"M454.865 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.099-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.445-.446a6.624 6.624 0 1 0-11.397-6.564c-.196-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.101 0c-.197.05-.394.097-.59.152a6.628 6.628 0 1 0-11.398 6.564 26.528 26.528 0 1 0 44.232 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),H||(H=B.createElement("path",{d:"M401.782 437.158h39.812v-26.541h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),N||(N=B.createElement("path",{d:"M454.865 427.205a3.318 3.318 0 0 0 0-6.635 3.411 3.411 0 0 0-.424.042c-.026-.1-.049-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2 .688q-.11-.113-.224-.223a3.282 3.282 0 0 0 .672-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.672-1.982q.114-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .424.042",fill:"#44d860",fillRule:"evenodd"})),k||(k=B.createElement("path",{d:"M282.345 347.581a3.318 3.318 0 0 1-3.317-3.318 9.953 9.953 0 1 0-19.906 0 3.318 3.318 0 1 1-6.636 0 16.588 16.588 0 1 1 33.177 0 3.318 3.318 0 0 1-3.318 3.318",fillRule:"evenodd"})),L||(L=B.createElement("path",{d:"M335.428 516.783h79.625a26.54 26.54 0 0 0 26.541-26.542v-92.895H361.97a26.54 26.54 0 0 0-26.542 26.542Z",fill:"#ffff50",fillRule:"evenodd"})),O||(O=B.createElement("path",{d:"M421.714 438.485h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.541h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0-66.106h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m0 26.294h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m19.88-122.607c-.016 0-.03-.008-.045-.007-4.1.14-6.04 4.241-7.753 7.86-1.786 3.783-3.168 6.242-5.432 6.167-2.506-.09-3.94-2.922-5.458-5.918-1.744-3.443-3.734-7.347-7.913-7.201-4.042.138-5.99 3.708-7.706 6.857-1.828 3.355-3.071 5.394-5.47 5.3-2.557-.093-3.916-2.395-5.488-5.06-1.753-2.967-3.78-6.304-7.878-6.19-3.973.137-5.925 3.166-7.648 5.84-1.822 2.826-3.098 4.549-5.527 4.447-2.618-.093-3.97-2.004-5.535-4.216-1.757-2.486-3.737-5.3-7.823-5.163-3.886.133-5.838 2.615-7.56 
4.802-1.634 2.075-2.91 3.718-5.611 3.615a1.328 1.328 0 1 0-.096 2.654c4.004.134 6.032-2.389 7.793-4.628 1.562-1.985 2.91-3.698 5.564-3.789 2.556-.108 3.754 1.48 5.567 4.041 1.721 2.434 3.675 5.195 7.606 5.337 4.118.138 6.099-2.94 7.853-5.663 1.569-2.434 2.923-4.535 5.508-4.624 2.38-.088 3.674 1.792 5.5 4.885 1.722 2.916 3.671 6.22 7.68 6.365 4.147.143 6.15-3.477 7.895-6.682 1.511-2.77 2.938-5.388 5.466-5.475 2.38-.056 3.62 2.116 5.456 5.746 1.714 3.388 3.658 7.226 7.73 7.373l.224.004c4.066 0 5.996-4.08 7.704-7.689 1.511-3.198 2.942-6.21 5.397-6.334Z",fillRule:"evenodd"})),G||(G=B.createElement("path",{d:"M308.887 516.783h53.083V463.7h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),V||(V=B.createElement("path",{d:"M388.511 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.098-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.444-.446a6.624 6.624 0 1 0-11.397-6.564c-.197-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.102 0c-.196.05-.394.097-.59.152a6.628 6.628 0 1 0-11.397 6.564 26.528 26.528 0 1 0 44.231 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),_||(_=B.createElement("path",{d:"M308.887 437.158h53.083v-26.541h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),S||(S=B.createElement("path",{d:"M375.24 427.205a3.318 3.318 0 1 0 0-6.635 3.411 3.411 0 0 0-.423.042c-.026-.1-.05-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2.001.688q-.11-.113-.223-.223a3.282 3.282 0 0 0 .671-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.671-1.982q.113-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .423.042",fill:"#44d860",fillRule:"evenodd"})),j||(j=B.createElement("path",{d:"M388.511 329.334a3.603 3.603 0 0 1-.65-.067 3.344 3.344 0 0 1-.624-.185 3.447 3.447 0 0 1-.572-.306 5.027 5.027 0 0 1-.504-.411 3.887 3.887 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.602 3.602 0 0 1 .067-.65 3.826 3.826 0 0 1 .184-.624 3.489 3.489 0 0 1 .307-.57 3.197 3.197 0 0 1 .914-.916 3.447 3.447 0 0 1 .572-.305 3.344 3.344 0 0 1 .624-.186 3.07 3.07 0 0 1 1.3 0 3.223 3.223 0 0 1 1.195.49 5.028 5.028 0 0 1 .504.412 4.88 4.88 0 0 1 .411.504 3.382 3.382 0 0 1 .306.571 3.478 3.478 0 0 1 .252 1.274 3.364 3.364 0 0 1-.969 2.349 5.027 5.027 0 0 1-.504.411 3.306 3.306 0 0 1-1.845.558m26.542-1.66a3.388 3.388 0 0 1-2.35-.968 5.042 5.042 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.387 3.387 0 0 1 .967-2.349 5.026 5.026 0 0 1 .505-.411 3.447 3.447 0 0 1 .572-.305 3.343 3.343 0 0 1 .623-.186 3.07 3.07 0 0 1 1.3 0 3.224 3.224 0 0 1 1.195.49 5.026 5.026 0 0 1 .504.412 3.388 3.388 0 0 1 .97 2.35 3.726 3.726 0 0 1-.067.65 3.374 3.374 0 0 1-.186.623 4.715 4.715 0 0 1-.305.57 4.88 4.88 0 0 1-.412.505 5.026 5.026 0 0 1-.504.412 3.305 3.305 0 0 1-1.844.557",fillRule:"evenodd"})))}},8391:(e,t,a)=>{a.r(t),a.d(t,{default:()=>g});var l=a(7294),r=a(6010),n=a(9960),c=a(2263),m=a(7961),h=a(7462);const d={features:"features_t9lD",featureSvg:"featureSvg_GfXr"},i=[{title:l.createElement("a",{href:"https://makinarocks.ai/"},"MakinaRocks"),Svg:a(4002).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"Sponsored by MakinaRocks"),"\uc774 \ud504\ub85c\uc81d\ud2b8\ub294 MakinaRocks\uc758 \uc9c0\uc6d0\uc744 \ubc1b\uc544 \uc81c\uc791\ub418\uc5c8\uc2b5\ub2c8\ub2e4.")},{title:l.createElement("a",{href:"https://mlops-for-mle.github.io/"},"MLOps for MLE"),Svg:a(9722).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"ML Engineer\ub97c \uc704\ud55c MLOps 
Release!"),"\uad6c\uae00\uc5d0\uc11c \uc81c\uc548\ud55c MLOps 0\ub2e8\uacc4\ub97c \uc9c1\uc811 \uad6c\ud604\ud558\uba70 MLOps \uac00 \ubb34\uc5c7\uc778\uc9c0 \uacf5\ubd80\ud560 \uc218 \uc788\ub294 \ud29c\ud1a0\ub9ac\uc5bc\uc744 \uc624\ud508\ud588\uc2b5\ub2c8\ub2e4!")}];function f(e){let{title:t,Svg:a,description:n}=e;return l.createElement("div",{className:(0,r.Z)("col col--6")},l.createElement("div",{className:"text--center"},l.createElement(a,{className:d.featureSvg,role:"img"})),l.createElement("div",{className:"text--center padding-horiz--md"},l.createElement("h3",null,t),l.createElement("p",null,n)))}function s(){return l.createElement("section",{className:d.features},l.createElement("div",{className:"container"},l.createElement("div",{className:"row"},i.map(((e,t)=>l.createElement(f,(0,h.Z)({key:t},e)))))))}const o={heroBanner:"heroBanner_qdFl",buttons:"buttons_AeoN"};function E(){const{siteConfig:e}=(0,c.Z)();return l.createElement("header",{className:(0,r.Z)("hero hero--primary",o.heroBanner)},l.createElement("div",{className:"container"},l.createElement("h1",{className:"hero__title"},e.title),l.createElement("p",{className:"hero__subtitle"},e.tagline),l.createElement("div",{className:o.buttons},l.createElement(n.Z,{className:"button button--secondary button--lg",to:"/docs/introduction/intro"},"Let's Start!"))))}function g(){const{siteConfig:e}=(0,c.Z)();return l.createElement(m.Z,{title:"MLOps for ALL",description:"Description will go into a meta tag in "},l.createElement(E,null),l.createElement("main",null,l.createElement(s,null)))}}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3237],{9722:(e,t,a)=>{a.d(t,{Z:()=>c});var l,r=a(7294);function n(){return n=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...c}=e;return r.createElement("svg",n({xmlns:"http://www.w3.org/2000/svg",width:1088,height:687.962,viewBox:"0 0 1088 687.962","aria-labelledby":a},c),void 0===t?r.createElement("title",{id:a},"Easy to Use"):t?r.createElement("title",{id:a},t):null,l||(l=r.createElement("g",{"data-name":"Group 12"},r.createElement("g",{"data-name":"Group 11"},r.createElement("path",{"data-name":"Path 83",d:"M961.81 454.442c-5.27 45.15-16.22 81.4-31.25 110.31-20 38.52-54.21 54.04-84.77 70.28a193.275 193.275 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.282 657.282 0 0 0-104.09-13.16q-14.97-.675-29.97-.67c-15.42.02-293.07 5.29-360.67-131.57-16.69-33.76-28.13-75-32.24-125.27-11.63-142.12 52.29-235.46 134.74-296.47 155.97-115.41 369.76-110.57 523.43 7.88 102.36 78.9 198.2 198.31 179.02 362.74Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 84",d:"M930.56 564.752c-20 38.52-47.21 64.04-77.77 80.28a193.272 193.272 0 0 1-27.46 11.94c-55.61 19.3-117.85 14.18-166.74 3.99a657.3 657.3 0 0 0-104.09-13.16q-14.97-.675-29.97-.67-23.13.03-46.25 1.72c-100.17 7.36-253.82-6.43-321.42-143.29L326 177.962l62.95 161.619 20.09 51.59 55.37-75.98L493 275.962l130.2 149.27 36.8-81.27 254.78 207.919 14.21 11.59Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 85",d:"m302 282.962 26-57 36 83-31-60Z",opacity:.1}),r.createElement("path",{"data-name":"Path 86",d:"M554.5 647.802q-14.97-.675-29.97-.67l-115.49-255.96Z",opacity:.1}),r.createElement("path",{"data-name":"Path 87",d:"M464.411 315.191 493 292.962l130 150-132-128Z",opacity:.1}),r.createElement("path",{"data-name":"Path 88",d:"M852.79 645.032a193.265 193.265 0 0 1-27.46 11.94L623.2 
425.232Z",opacity:.1}),r.createElement("circle",{"data-name":"Ellipse 11",cx:3,cy:3,r:3,transform:"translate(479 98.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 12",cx:3,cy:3,r:3,transform:"translate(396 201.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 13",cx:2,cy:2,r:2,transform:"translate(600 220.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 14",cx:2,cy:2,r:2,transform:"translate(180 265.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 15",cx:2,cy:2,r:2,transform:"translate(612 96.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 16",cx:2,cy:2,r:2,transform:"translate(736 192.962)",fill:"#f2f2f2"}),r.createElement("circle",{"data-name":"Ellipse 17",cx:2,cy:2,r:2,transform:"translate(858 344.962)",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 89",d:"M306 121.222h-2.76v-2.76h-1.48v2.76H299v1.478h2.76v2.759h1.48V122.7H306Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 90",d:"M848 424.222h-2.76v-2.76h-1.48v2.76H841v1.478h2.76v2.759h1.48V425.7H848Z",fill:"#f2f2f2"}),r.createElement("path",{"data-name":"Path 91",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 92",d:"M1088 613.962c0 16.569-243.557 74-544 74s-544-57.431-544-74 243.557 14 544 14 544-30.568 544-14Z",opacity:.1}),r.createElement("ellipse",{"data-name":"Ellipse 18",cx:544,cy:30,rx:544,ry:30,transform:"translate(0 583.962)",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 93",d:"M568 571.962c0 33.137-14.775 24-33 24s-33 9.137-33-24 33-96 33-96 33 62.863 33 96Z",fill:"#ff6584"}),r.createElement("path",{"data-name":"Path 94",d:"M550 584.641c0 15.062-6.716 10.909-15 10.909s-15 4.153-15-10.909 15-43.636 15-43.636 15 28.576 15 43.636Z",opacity:.1}),r.createElement("rect",{"data-name":"Rectangle 97",width:92,height:18,rx:9,transform:"translate(489 604.962)",fill:"#2f2e41"}),r.createElement("rect",{"data-name":"Rectangle 98",width:92,height:18,rx:9,transform:"translate(489 586.962)",fill:"#2f2e41"}),r.createElement("path",{"data-name":"Path 95",d:"M137 490.528c0 55.343 34.719 100.126 77.626 100.126",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 96",d:"M214.626 590.654c0-55.965 38.745-101.251 86.626-101.251",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 97",d:"M165.125 495.545c0 52.57 22.14 95.109 49.5 95.109",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 98",d:"M214.626 590.654c0-71.511 44.783-129.377 100.126-129.377",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 99",d:"M198.3 591.36s11.009-.339 14.326-2.7 16.934-5.183 17.757-1.395 16.544 18.844 4.115 18.945-28.879-1.936-32.19-3.953-4.008-10.897-4.008-10.897Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 100",d:"M234.716 604.89c-12.429.1-28.879-1.936-32.19-3.953-2.522-1.536-3.527-7.048-3.863-9.591l-.368.014s.7 8.879 4.009 10.9 19.761 4.053 32.19 3.953c3.588-.029 4.827-1.305 4.759-3.2-.498 1.142-1.867 1.855-4.537 1.877Z",opacity:.2}),r.createElement("path",{"data-name":"Path 101",d:"M721.429 527.062c0 38.029 23.857 68.8 53.341 68.8",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 102",d:"M774.769 595.863c0-38.456 26.623-69.575 59.525-69.575",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 103",d:"M740.755 530.509c0 36.124 15.213 65.354 34.014 65.354",fill:"#6c63ff"}),r.createElement("path",{"data-name":"Path 104",d:"M774.769 595.863c0-49.139 
30.773-88.9 68.8-88.9",fill:"#3f3d56"}),r.createElement("path",{"data-name":"Path 105",d:"M763.548 596.348s7.565-.233 9.844-1.856 11.636-3.562 12.2-.958 11.368 12.949 2.828 13.018-19.844-1.33-22.119-2.716-2.753-7.488-2.753-7.488Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 106",d:"M788.574 605.645c-8.54.069-19.844-1.33-22.119-2.716-1.733-1.056-2.423-4.843-2.654-6.59l-.253.01s.479 6.1 2.755 7.487 13.579 2.785 22.119 2.716c2.465-.02 3.317-.9 3.27-2.2-.343.788-1.283 1.278-3.118 1.293Z",opacity:.2}),r.createElement("path",{"data-name":"Path 107",d:"M893.813 618.699s11.36-1.729 14.5-4.591 16.89-7.488 18.217-3.667 19.494 17.447 6.633 19.107-30.153 1.609-33.835-.065-5.515-10.784-5.515-10.784Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 108",d:"M933.228 628.154c-12.86 1.659-30.153 1.609-33.835-.065-2.8-1.275-4.535-6.858-5.2-9.45l-.379.061s1.833 9.109 5.516 10.783 20.975 1.725 33.835.065c3.712-.479 4.836-1.956 4.529-3.906-.375 1.246-1.703 2.156-4.466 2.512Z",opacity:.2}),r.createElement("path",{"data-name":"Path 109",d:"M614.26 617.881s9.587-1.459 12.237-3.875 14.255-6.32 15.374-3.095 16.452 14.725 5.6 16.125-25.448 1.358-28.555-.055-4.656-9.1-4.656-9.1Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 110",d:"M647.524 625.856c-10.853 1.4-25.448 1.358-28.555-.055-2.367-1.076-3.827-5.788-4.39-7.976l-.32.051s1.547 7.687 4.655 9.1 17.7 1.456 28.555.055c3.133-.4 4.081-1.651 3.822-3.3-.314 1.057-1.435 1.825-3.767 2.125Z",opacity:.2}),r.createElement("path",{"data-name":"Path 111",d:"M122.389 613.09s7.463-1.136 9.527-3.016 11.1-4.92 11.969-2.409 12.808 11.463 4.358 12.553-19.811 1.057-22.23-.043-3.624-7.085-3.624-7.085Z",fill:"#a8a8a8"}),r.createElement("path",{"data-name":"Path 112",d:"M148.285 619.302c-8.449 1.09-19.811 1.057-22.23-.043-1.842-.838-2.979-4.506-3.417-6.209l-.249.04s1.2 5.984 3.624 7.085 13.781 1.133 22.23.043c2.439-.315 3.177-1.285 2.976-2.566-.246.818-1.119 1.416-2.934 1.65Z",opacity:.2}),r.createElement("path",{"data-name":"Path 113",d:"M383.7 601.318c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.118-36.793 93.694-36.793 93.08 6.573 93.08 36.793Z",opacity:.1}),r.createElement("path",{"data-name":"Path 114",d:"M383.7 593.881c0 30.22-42.124 20.873-93.7 20.873s-93.074 9.347-93.074-20.873 42.114-36.8 93.69-36.8 93.084 6.576 93.084 36.8Z",fill:"#3f3d56"})),r.createElement("path",{"data-name":"Path 40",d:"M360.175 475.732h91.791v37.153h-91.791Z",fill:"#fff",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 41",d:"M277.126 597.026a21.828 21.828 0 0 1-18.908-10.927 21.829 21.829 0 0 0 18.908 32.782h21.855v-21.855Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 42",d:"m375.451 481.607 76.514-4.782v-10.928a21.854 21.854 0 0 0-21.855-21.855h-98.347l-2.732-4.735a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.732-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.732-2.731-4.732a3.154 3.154 0 0 0-5.464 0l-2.732 4.735h-.071l-4.526-4.525a3.153 3.153 0 0 0-5.276 1.414l-1.5 5.577-5.674-1.521a3.154 3.154 0 0 0-3.863 3.864l1.52 5.679-5.575 1.494a3.155 3.155 0 0 0-1.416 5.278l4.526 4.526v.07l-4.735 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.727a3.154 3.154 0 0 0 0 5.464l4.735 2.736-4.735 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.732a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 
5.464l4.732 2.732-4.732 2.731a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.735a3.154 3.154 0 0 0 0 5.464l4.732 2.732-4.732 2.728a3.154 3.154 0 0 0 0 5.464l4.732 2.732a21.854 21.854 0 0 0 21.858 21.855h131.13a21.854 21.854 0 0 0 21.855-21.855v-87.42l-76.514-4.782a11.632 11.632 0 0 1 0-23.219",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 43",d:"M408.255 618.882h32.782v-43.71h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 44",d:"M462.893 591.563a5.438 5.438 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 45",d:"M419.183 553.317h32.782v-21.855h-32.782Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 46",d:"M462.893 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.814 2.814 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 47",d:"M320.836 479.556a2.732 2.732 0 0 1-2.732-2.732 8.2 8.2 0 0 0-16.391 0 2.732 2.732 0 0 1-5.464 0 13.66 13.66 0 0 1 27.319 0 2.732 2.732 0 0 1-2.732 2.732",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 48",d:"M364.546 618.881h65.565a21.854 21.854 0 0 0 21.855-21.855v-76.492h-65.565a21.854 21.854 0 0 0-21.855 21.855Z",fill:"#ffff50",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 49",d:"M435.596 554.41h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0-54.434h-54.681a1.093 1.093 0 1 1 0-2.185h54.681a1.093 1.093 0 0 1 0 2.185m0 21.652h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m0 21.855h-54.681a1.093 1.093 0 1 1 0-2.186h54.681a1.093 1.093 0 0 1 0 2.186m16.369-100.959c-.013 0-.024-.007-.037-.005-3.377.115-4.974 3.492-6.384 6.472-1.471 3.114-2.608 5.139-4.473 5.078-2.064-.074-3.244-2.406-4.494-4.874-1.436-2.835-3.075-6.049-6.516-5.929-3.329.114-4.932 3.053-6.346 5.646-1.5 2.762-2.529 4.442-4.5 4.364-2.106-.076-3.225-1.972-4.52-4.167-1.444-2.443-3.112-5.191-6.487-5.1-3.272.113-4.879 2.606-6.3 4.808-1.5 2.328-2.552 3.746-4.551 3.662-2.156-.076-3.27-1.65-4.558-3.472-1.447-2.047-3.077-4.363-6.442-4.251-3.2.109-4.807 2.153-6.224 3.954-1.346 1.709-2.4 3.062-4.621 2.977a1.094 1.094 0 0 0-.079 2.186c3.3.11 4.967-1.967 6.417-3.81 1.286-1.635 2.4-3.045 4.582-3.12 2.1-.09 3.091 1.218 4.584 3.327 1.417 2 3.026 4.277 6.263 4.394 3.391.114 5.022-2.42 6.467-4.663 1.292-2 2.406-3.734 4.535-3.807 1.959-.073 3.026 1.475 4.529 4.022 1.417 2.4 3.023 5.121 6.324 5.241 3.415.118 5.064-2.863 6.5-5.5 1.245-2.282 2.419-4.437 4.5-4.509 1.959-.046 2.981 1.743 4.492 4.732 1.412 2.79 3.013 5.95 6.365 6.071h.185c3.348 0 4.937-3.36 6.343-6.331 1.245-2.634 2.423-5.114 4.444-5.216Z",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 50",d:"M342.691 
618.882h43.71v-43.71h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 8",transform:"rotate(-14.98 2188.845 -1120.376)"},r.createElement("rect",{"data-name":"Rectangle 3",width:92.361,height:36.462,rx:2,fill:"#d8d8d8"}),r.createElement("g",{"data-name":"Group 2",transform:"translate(1.531 23.03)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 4",width:5.336,height:5.336,rx:1,transform:"translate(16.797)"}),r.createElement("rect",{"data-name":"Rectangle 5",width:5.336,height:5.336,rx:1,transform:"translate(23.12)"}),r.createElement("rect",{"data-name":"Rectangle 6",width:5.336,height:5.336,rx:1,transform:"translate(29.444)"}),r.createElement("rect",{"data-name":"Rectangle 7",width:5.336,height:5.336,rx:1,transform:"translate(35.768)"}),r.createElement("rect",{"data-name":"Rectangle 8",width:5.336,height:5.336,rx:1,transform:"translate(42.091)"}),r.createElement("rect",{"data-name":"Rectangle 9",width:5.336,height:5.336,rx:1,transform:"translate(48.415)"}),r.createElement("rect",{"data-name":"Rectangle 10",width:5.336,height:5.336,rx:1,transform:"translate(54.739)"}),r.createElement("rect",{"data-name":"Rectangle 11",width:5.336,height:5.336,rx:1,transform:"translate(61.063)"}),r.createElement("rect",{"data-name":"Rectangle 12",width:5.336,height:5.336,rx:1,transform:"translate(67.386)"}),r.createElement("path",{"data-name":"Path 51",d:"M1.093 0h13.425a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0ZM75 0h13.426a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H75a1.093 1.093 0 0 1-1.093-1.093v-3.15A1.093 1.093 0 0 1 75 0Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 3",transform:"translate(1.531 10.261)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 52",d:"M1.093 0h5.125A1.093 1.093 0 0 1 7.31 1.093v3.149a1.093 1.093 0 0 1-1.092 1.093H1.093A1.093 1.093 0 0 1 0 4.242V1.093A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 13",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 14",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 15",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 16",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 17",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 18",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 19",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 20",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 21",width:5.336,height:5.336,rx:1,transform:"translate(58.888)"}),r.createElement("rect",{"data-name":"Rectangle 22",width:5.336,height:5.336,rx:1,transform:"translate(65.212)"}),r.createElement("rect",{"data-name":"Rectangle 23",width:5.336,height:5.336,rx:1,transform:"translate(71.536)"}),r.createElement("rect",{"data-name":"Rectangle 24",width:5.336,height:5.336,rx:1,transform:"translate(77.859)"}),r.createElement("rect",{"data-name":"Rectangle 25",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 
4",transform:"rotate(180 45.525 4.773)",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 53",d:"M1.093 0h5.126a1.093 1.093 0 0 1 1.093 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093H1.093A1.093 1.093 0 0 1 0 4.243v-3.15A1.093 1.093 0 0 1 1.093 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 26",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 27",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 28",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 29",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 30",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 31",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 32",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 33",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 34",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 35",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 36",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 37",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 38",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("rect",{"data-name":"Rectangle 39",width:5.336,height:5.336,rx:1,transform:"translate(8.299)"}),r.createElement("rect",{"data-name":"Rectangle 40",width:5.336,height:5.336,rx:1,transform:"translate(14.623)"}),r.createElement("rect",{"data-name":"Rectangle 41",width:5.336,height:5.336,rx:1,transform:"translate(20.947)"}),r.createElement("rect",{"data-name":"Rectangle 42",width:5.336,height:5.336,rx:1,transform:"translate(27.271)"}),r.createElement("rect",{"data-name":"Rectangle 43",width:5.336,height:5.336,rx:1,transform:"translate(33.594)"}),r.createElement("rect",{"data-name":"Rectangle 44",width:5.336,height:5.336,rx:1,transform:"translate(39.918)"}),r.createElement("rect",{"data-name":"Rectangle 45",width:5.336,height:5.336,rx:1,transform:"translate(46.242)"}),r.createElement("rect",{"data-name":"Rectangle 46",width:5.336,height:5.336,rx:1,transform:"translate(52.565)"}),r.createElement("rect",{"data-name":"Rectangle 47",width:5.336,height:5.336,rx:1,transform:"translate(58.889)"}),r.createElement("rect",{"data-name":"Rectangle 48",width:5.336,height:5.336,rx:1,transform:"translate(65.213)"}),r.createElement("rect",{"data-name":"Rectangle 49",width:5.336,height:5.336,rx:1,transform:"translate(71.537)"}),r.createElement("rect",{"data-name":"Rectangle 50",width:5.336,height:5.336,rx:1,transform:"translate(77.86)"}),r.createElement("rect",{"data-name":"Rectangle 51",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"})),r.createElement("g",{"data-name":"Group 6",fill:"#4a4a4a"},r.createElement("path",{"data-name":"Path 54",d:"M2.624 16.584h7.3a1.093 1.093 0 0 1 1.092 1.093v3.15a1.093 1.093 0 0 1-1.093 1.093h-7.3a1.093 1.093 0 0 1-1.092-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"}),r.createElement("g",{"data-name":"Group 
5",transform:"translate(12.202 16.584)"},r.createElement("rect",{"data-name":"Rectangle 52",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 53",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 54",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 55",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("rect",{"data-name":"Rectangle 56",width:5.336,height:5.336,rx:1,transform:"translate(25.295)"}),r.createElement("rect",{"data-name":"Rectangle 57",width:5.336,height:5.336,rx:1,transform:"translate(31.619)"}),r.createElement("rect",{"data-name":"Rectangle 58",width:5.336,height:5.336,rx:1,transform:"translate(37.942)"}),r.createElement("rect",{"data-name":"Rectangle 59",width:5.336,height:5.336,rx:1,transform:"translate(44.265)"}),r.createElement("rect",{"data-name":"Rectangle 60",width:5.336,height:5.336,rx:1,transform:"translate(50.589)"}),r.createElement("rect",{"data-name":"Rectangle 61",width:5.336,height:5.336,rx:1,transform:"translate(56.912)"}),r.createElement("rect",{"data-name":"Rectangle 62",width:5.336,height:5.336,rx:1,transform:"translate(63.236)"})),r.createElement("path",{"data-name":"Path 55",d:"M83.053 16.584h6.906a1.093 1.093 0 0 1 1.091 1.093v3.15a1.093 1.093 0 0 1-1.091 1.093h-6.907a1.093 1.093 0 0 1-1.093-1.093v-3.149a1.093 1.093 0 0 1 1.093-1.094Z",fillRule:"evenodd"})),r.createElement("g",{"data-name":"Group 7",transform:"translate(1.531 29.627)",fill:"#4a4a4a"},r.createElement("rect",{"data-name":"Rectangle 63",width:5.336,height:5.336,rx:1}),r.createElement("rect",{"data-name":"Rectangle 64",width:5.336,height:5.336,rx:1,transform:"translate(6.324)"}),r.createElement("rect",{"data-name":"Rectangle 65",width:5.336,height:5.336,rx:1,transform:"translate(12.647)"}),r.createElement("rect",{"data-name":"Rectangle 66",width:5.336,height:5.336,rx:1,transform:"translate(18.971)"}),r.createElement("path",{"data-name":"Path 56",d:"M26.387 0h30.422a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093H26.387a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 26.387 0Zm33.594 0h3.942a1.093 1.093 0 0 1 1.093 1.093v3.151a1.093 1.093 0 0 1-1.093 1.093h-3.942a1.093 1.093 0 0 1-1.093-1.093V1.093A1.093 1.093 0 0 1 59.981 0Z",fillRule:"evenodd"}),r.createElement("rect",{"data-name":"Rectangle 67",width:5.336,height:5.336,rx:1,transform:"translate(66.003)"}),r.createElement("rect",{"data-name":"Rectangle 68",width:5.336,height:5.336,rx:1,transform:"translate(72.327)"}),r.createElement("rect",{"data-name":"Rectangle 69",width:5.336,height:5.336,rx:1,transform:"translate(84.183)"}),r.createElement("path",{"data-name":"Path 57",d:"M78.254 2.273v-1.18A1.093 1.093 0 0 1 79.347 0h3.15a1.093 1.093 0 0 1 1.093 1.093v1.18Z"}),r.createElement("path",{"data-name":"Path 58",d:"M83.591 3.063v1.18a1.093 1.093 0 0 1-1.093 1.093h-3.15a1.093 1.093 0 0 1-1.093-1.093v-1.18Z"})),r.createElement("rect",{"data-name":"Rectangle 70",width:88.927,height:2.371,rx:1.085,transform:"translate(1.925 1.17)",fill:"#4a4a4a"}),r.createElement("rect",{"data-name":"Rectangle 71",width:4.986,height:1.581,rx:.723,transform:"translate(4.1 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 72",width:4.986,height:1.581,rx:.723,transform:"translate(10.923 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 73",width:4.986,height:1.581,rx:.723,transform:"translate(16.173 
1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 74",width:4.986,height:1.581,rx:.723,transform:"translate(21.421 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 75",width:4.986,height:1.581,rx:.723,transform:"translate(26.671 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 76",width:4.986,height:1.581,rx:.723,transform:"translate(33.232 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 77",width:4.986,height:1.581,rx:.723,transform:"translate(38.48 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 78",width:4.986,height:1.581,rx:.723,transform:"translate(43.73 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 79",width:4.986,height:1.581,rx:.723,transform:"translate(48.978 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 80",width:4.986,height:1.581,rx:.723,transform:"translate(55.54 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 81",width:4.986,height:1.581,rx:.723,transform:"translate(60.788 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 82",width:4.986,height:1.581,rx:.723,transform:"translate(66.038 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 83",width:4.986,height:1.581,rx:.723,transform:"translate(72.599 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 84",width:4.986,height:1.581,rx:.723,transform:"translate(77.847 1.566)",fill:"#d8d8d8",opacity:.136}),r.createElement("rect",{"data-name":"Rectangle 85",width:4.986,height:1.581,rx:.723,transform:"translate(83.097 1.566)",fill:"#d8d8d8",opacity:.136})),r.createElement("path",{"data-name":"Path 59",d:"M408.256 591.563a5.439 5.439 0 0 0-.7.07c-.042-.164-.081-.329-.127-.493a5.457 5.457 0 1 0-5.4-9.372q-.181-.185-.366-.367a5.454 5.454 0 1 0-9.384-5.4c-.162-.046-.325-.084-.486-.126a5.467 5.467 0 1 0-10.788 0c-.162.042-.325.08-.486.126a5.457 5.457 0 1 0-9.384 5.4 21.843 21.843 0 1 0 36.421 21.02 5.452 5.452 0 1 0 .7-10.858",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 60",d:"M342.691 553.317h43.71v-21.855h-43.71Z",fill:"#3ecc5f",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 61",d:"M397.328 545.121a2.732 2.732 0 1 0 0-5.464 2.811 2.811 0 0 0-.349.035c-.022-.082-.04-.164-.063-.246a2.733 2.733 0 0 0-1.052-5.253 2.7 2.7 0 0 0-1.648.566q-.09-.093-.184-.184a2.7 2.7 0 0 0 .553-1.633 2.732 2.732 0 0 0-5.245-1.07 10.928 10.928 0 1 0 0 21.031 2.732 2.732 0 0 0 5.245-1.07 2.7 2.7 0 0 0-.553-1.633q.093-.09.184-.184a2.7 2.7 0 0 0 1.648.566 2.732 2.732 0 0 0 1.052-5.253c.023-.081.042-.164.063-.246a2.811 2.811 0 0 0 .349.035",fill:"#44d860",fillRule:"evenodd"}),r.createElement("path",{"data-name":"Path 62",d:"M408.256 464.531a2.967 2.967 0 0 1-.535-.055 2.754 2.754 0 0 1-.514-.153 2.838 2.838 0 0 1-.471-.251 4.139 4.139 0 0 1-.415-.339 3.2 3.2 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.517 2.968 2.968 0 0 1 .055-.535 3.152 3.152 0 0 1 .152-.514 2.874 2.874 0 0 1 .252-.47 2.633 2.633 0 0 1 .753-.754 2.837 2.837 0 0 1 .471-.251 2.753 2.753 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 4.019 4.019 0 0 1 .339.415 2.786 2.786 0 0 1 .251.47 2.864 2.864 0 0 1 .208 1.049 2.77 2.77 0 0 1-.8 1.934 4.139 4.139 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459m21.855-1.366a2.789 2.789 0 
0 1-1.935-.8 4.162 4.162 0 0 1-.338-.415 2.7 2.7 0 0 1-.459-1.519 2.789 2.789 0 0 1 .8-1.934 4.139 4.139 0 0 1 .415-.339 2.838 2.838 0 0 1 .471-.251 2.752 2.752 0 0 1 .514-.153 2.527 2.527 0 0 1 1.071 0 2.654 2.654 0 0 1 .983.4 4.139 4.139 0 0 1 .415.339 2.79 2.79 0 0 1 .8 1.934 3.069 3.069 0 0 1-.055.535 2.779 2.779 0 0 1-.153.514 3.885 3.885 0 0 1-.251.47 4.02 4.02 0 0 1-.339.415 4.138 4.138 0 0 1-.415.339 2.722 2.722 0 0 1-1.519.459",fillRule:"evenodd"}))))}},4002:(e,t,a)=>{a.d(t,{Z:()=>F});var l,r,n,c,m,h,d,i,f,s,o,E,g,p,x,R,v,w,u,M,y,Z,P,b,A,q,H,N,k,L,O,G,V,_,S,j,B=a(7294);function C(){return C=Object.assign?Object.assign.bind():function(e){for(var t=1;t{let{title:t,titleId:a,...F}=e;return B.createElement("svg",C({xmlns:"http://www.w3.org/2000/svg",width:1129,height:663,viewBox:"0 0 1129 663","aria-labelledby":a},F),void 0===t?B.createElement("title",{id:a},"Focus on What Matters"):t?B.createElement("title",{id:a},t):null,l||(l=B.createElement("circle",{cx:321,cy:321,r:321,fill:"#f2f2f2"})),r||(r=B.createElement("ellipse",{cx:559,cy:635.5,rx:514,ry:27.5,fill:"#3f3d56"})),n||(n=B.createElement("ellipse",{cx:558,cy:627,rx:460,ry:22,opacity:.2})),c||(c=B.createElement("path",{fill:"#3f3d56",d:"M131 152.5h840v50H131z"})),m||(m=B.createElement("path",{d:"M131 608.83a21.67 21.67 0 0 0 21.67 21.67h796.66A21.67 21.67 0 0 0 971 608.83V177.5H131ZM949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",fill:"#3f3d56"})),h||(h=B.createElement("path",{d:"M949.33 117.5H152.67A21.67 21.67 0 0 0 131 139.17v38.33h840v-38.33a21.67 21.67 0 0 0-21.67-21.67Z",opacity:.2})),d||(d=B.createElement("circle",{cx:181,cy:147.5,r:13,fill:"#3f3d56"})),i||(i=B.createElement("circle",{cx:217,cy:147.5,r:13,fill:"#3f3d56"})),f||(f=B.createElement("circle",{cx:253,cy:147.5,r:13,fill:"#3f3d56"})),s||(s=B.createElement("rect",{x:168,y:213.5,width:337,height:386,rx:5.335,fill:"#606060"})),o||(o=B.createElement("rect",{x:603,y:272.5,width:284,height:22,rx:5.476,fill:"#2e8555"})),E||(E=B.createElement("rect",{x:537,y:352.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),g||(g=B.createElement("rect",{x:537,y:396.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),p||(p=B.createElement("rect",{x:537,y:440.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),x||(x=B.createElement("rect",{x:537,y:484.5,width:416,height:15,rx:5.476,fill:"#2e8555"})),R||(R=B.createElement("rect",{x:865,y:552.5,width:88,height:26,rx:7.028,fill:"#3ecc5f"})),v||(v=B.createElement("path",{d:"M1053.103 506.116a30.114 30.114 0 0 0 3.983-15.266c0-13.797-8.544-24.98-19.083-24.98s-19.082 11.183-19.082 24.98a30.114 30.114 0 0 0 3.983 15.266 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 31.248 31.248 0 0 0 0 30.532 30.114 30.114 0 0 0-3.983 15.266c0 13.797 8.543 24.981 19.082 24.981s19.083-11.184 19.083-24.98a30.114 30.114 0 0 0-3.983-15.267 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532 31.248 31.248 0 0 0 0-30.532Z",fill:"#3f3d56"})),w||(w=B.createElement("ellipse",{cx:1038.003,cy:460.318,rx:19.083,ry:24.981,fill:"#3f3d56"})),u||(u=B.createElement("ellipse",{cx:1038.003,cy:429.786,rx:19.083,ry:24.981,fill:"#3f3d56"})),M||(M=B.createElement("path",{d:"M1109.439 220.845a91.61 91.61 0 0 0 7.106-10.461l-50.14-8.235 54.228.403a91.566 91.566 0 0 0 1.746-72.426l-72.755 37.742 67.097-49.321A91.413 91.413 0 1 0 965.75 220.845a91.458 91.458 0 0 0-10.425 16.67l65.087 33.814-69.4-23.292a91.46 91.46 0 0 0 14.738 85.837 91.406 91.406 0 1 0 143.689 0 91.418 91.418 0 0 0 
0-113.03Z",fill:"#3ecc5f",fillRule:"evenodd"})),y||(y=B.createElement("path",{d:"M946.188 277.36a91.013 91.013 0 0 0 19.562 56.514 91.406 91.406 0 1 0 143.689 0c12.25-15.553-163.25-66.774-163.25-56.515Z",opacity:.1})),Z||(Z=B.createElement("path",{d:"M330.12 342.936h111.474v45.12H330.12Z",fill:"#fff",fillRule:"evenodd"})),P||(P=B.createElement("path",{d:"M229.263 490.241a26.51 26.51 0 0 1-22.963-13.27 26.51 26.51 0 0 0 22.963 39.812h26.541V490.24Z",fill:"#3ecc5f",fillRule:"evenodd"})),b||(b=B.createElement("path",{d:"m348.672 350.07 92.922-5.807v-13.27a26.54 26.54 0 0 0-26.541-26.542H295.616l-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746-3.317-5.746a3.83 3.83 0 0 0-6.636 0l-3.317 5.746-3.318-5.746a3.83 3.83 0 0 0-6.635 0l-3.318 5.746c-.03 0-.056.004-.086.004l-5.497-5.495a3.83 3.83 0 0 0-6.407 1.717l-1.817 6.773-6.89-1.847a3.83 3.83 0 0 0-4.691 4.693l1.844 6.891-6.77 1.814a3.832 3.832 0 0 0-1.72 6.41l5.497 5.497c0 .028-.004.055-.004.085l-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318-5.747 3.317a3.83 3.83 0 0 0 0 6.636l5.747 3.317-5.747 3.318a3.83 3.83 0 0 0 0 6.635l5.747 3.318a26.54 26.54 0 0 0 26.541 26.542h159.249a26.54 26.54 0 0 0 26.541-26.542V384.075l-92.922-5.807a14.126 14.126 0 0 1 0-28.197",fill:"#3ecc5f",fillRule:"evenodd"})),A||(A=B.createElement("path",{d:"M388.511 516.783h39.812V463.7h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),q||(q=B.createElement("path",{d:"M454.865 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.099-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.445-.446a6.624 6.624 0 1 0-11.397-6.564c-.196-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.101 0c-.197.05-.394.097-.59.152a6.628 6.628 0 1 0-11.398 6.564 26.528 26.528 0 1 0 44.232 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),H||(H=B.createElement("path",{d:"M401.782 437.158h39.812v-26.541h-39.812Z",fill:"#3ecc5f",fillRule:"evenodd"})),N||(N=B.createElement("path",{d:"M454.865 427.205a3.318 3.318 0 0 0 0-6.635 3.411 3.411 0 0 0-.424.042c-.026-.1-.049-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2 .688q-.11-.113-.224-.223a3.282 3.282 0 0 0 .672-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.672-1.982q.114-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .424.042",fill:"#44d860",fillRule:"evenodd"})),k||(k=B.createElement("path",{d:"M282.345 347.581a3.318 3.318 0 0 1-3.317-3.318 9.953 9.953 0 1 0-19.906 0 3.318 3.318 0 1 1-6.636 0 16.588 16.588 0 1 1 33.177 0 3.318 3.318 0 0 1-3.318 3.318",fillRule:"evenodd"})),L||(L=B.createElement("path",{d:"M335.428 516.783h79.625a26.54 26.54 0 0 0 26.541-26.542v-92.895H361.97a26.54 26.54 0 0 0-26.542 26.542Z",fill:"#ffff50",fillRule:"evenodd"})),O||(O=B.createElement("path",{d:"M421.714 438.485h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.541h-66.406a1.327 1.327 0 1 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0-66.106h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 
2.655m0 26.294h-66.406a1.327 1.327 0 0 1 0-2.654h66.406a1.327 1.327 0 0 1 0 2.654m0 26.542h-66.406a1.327 1.327 0 0 1 0-2.655h66.406a1.327 1.327 0 0 1 0 2.655m19.88-122.607c-.016 0-.03-.008-.045-.007-4.1.14-6.04 4.241-7.753 7.86-1.786 3.783-3.168 6.242-5.432 6.167-2.506-.09-3.94-2.922-5.458-5.918-1.744-3.443-3.734-7.347-7.913-7.201-4.042.138-5.99 3.708-7.706 6.857-1.828 3.355-3.071 5.394-5.47 5.3-2.557-.093-3.916-2.395-5.488-5.06-1.753-2.967-3.78-6.304-7.878-6.19-3.973.137-5.925 3.166-7.648 5.84-1.822 2.826-3.098 4.549-5.527 4.447-2.618-.093-3.97-2.004-5.535-4.216-1.757-2.486-3.737-5.3-7.823-5.163-3.886.133-5.838 2.615-7.56 4.802-1.634 2.075-2.91 3.718-5.611 3.615a1.328 1.328 0 1 0-.096 2.654c4.004.134 6.032-2.389 7.793-4.628 1.562-1.985 2.91-3.698 5.564-3.789 2.556-.108 3.754 1.48 5.567 4.041 1.721 2.434 3.675 5.195 7.606 5.337 4.118.138 6.099-2.94 7.853-5.663 1.569-2.434 2.923-4.535 5.508-4.624 2.38-.088 3.674 1.792 5.5 4.885 1.722 2.916 3.671 6.22 7.68 6.365 4.147.143 6.15-3.477 7.895-6.682 1.511-2.77 2.938-5.388 5.466-5.475 2.38-.056 3.62 2.116 5.456 5.746 1.714 3.388 3.658 7.226 7.73 7.373l.224.004c4.066 0 5.996-4.08 7.704-7.689 1.511-3.198 2.942-6.21 5.397-6.334Z",fillRule:"evenodd"})),G||(G=B.createElement("path",{d:"M308.887 516.783h53.083V463.7h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),V||(V=B.createElement("path",{d:"M388.511 483.606a6.602 6.602 0 0 0-.848.085c-.05-.2-.098-.4-.154-.599a6.627 6.627 0 1 0-6.557-11.382q-.22-.225-.444-.446a6.624 6.624 0 1 0-11.397-6.564c-.197-.055-.394-.102-.59-.152a6.64 6.64 0 1 0-13.102 0c-.196.05-.394.097-.59.152a6.628 6.628 0 1 0-11.397 6.564 26.528 26.528 0 1 0 44.231 25.528 6.621 6.621 0 1 0 .848-13.186",fill:"#44d860",fillRule:"evenodd"})),_||(_=B.createElement("path",{d:"M308.887 437.158h53.083v-26.541h-53.083Z",fill:"#3ecc5f",fillRule:"evenodd"})),S||(S=B.createElement("path",{d:"M375.24 427.205a3.318 3.318 0 1 0 0-6.635 3.411 3.411 0 0 0-.423.042c-.026-.1-.05-.199-.077-.298a3.319 3.319 0 0 0-1.278-6.38 3.282 3.282 0 0 0-2.001.688q-.11-.113-.223-.223a3.282 3.282 0 0 0 .671-1.983 3.318 3.318 0 0 0-6.37-1.299 13.27 13.27 0 1 0 0 25.541 3.318 3.318 0 0 0 6.37-1.3 3.282 3.282 0 0 0-.671-1.982q.113-.11.223-.223a3.282 3.282 0 0 0 2.001.688 3.318 3.318 0 0 0 1.278-6.38c.028-.098.05-.199.077-.298a3.413 3.413 0 0 0 .423.042",fill:"#44d860",fillRule:"evenodd"})),j||(j=B.createElement("path",{d:"M388.511 329.334a3.603 3.603 0 0 1-.65-.067 3.344 3.344 0 0 1-.624-.185 3.447 3.447 0 0 1-.572-.306 5.027 5.027 0 0 1-.504-.411 3.887 3.887 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.602 3.602 0 0 1 .067-.65 3.826 3.826 0 0 1 .184-.624 3.489 3.489 0 0 1 .307-.57 3.197 3.197 0 0 1 .914-.916 3.447 3.447 0 0 1 .572-.305 3.344 3.344 0 0 1 .624-.186 3.07 3.07 0 0 1 1.3 0 3.223 3.223 0 0 1 1.195.49 5.028 5.028 0 0 1 .504.412 4.88 4.88 0 0 1 .411.504 3.382 3.382 0 0 1 .306.571 3.478 3.478 0 0 1 .252 1.274 3.364 3.364 0 0 1-.969 2.349 5.027 5.027 0 0 1-.504.411 3.306 3.306 0 0 1-1.845.558m26.542-1.66a3.388 3.388 0 0 1-2.35-.968 5.042 5.042 0 0 1-.41-.504 3.275 3.275 0 0 1-.558-1.845 3.387 3.387 0 0 1 .967-2.349 5.026 5.026 0 0 1 .505-.411 3.447 3.447 0 0 1 .572-.305 3.343 3.343 0 0 1 .623-.186 3.07 3.07 0 0 1 1.3 0 3.224 3.224 0 0 1 1.195.49 5.026 5.026 0 0 1 .504.412 3.388 3.388 0 0 1 .97 2.35 3.726 3.726 0 0 1-.067.65 3.374 3.374 0 0 1-.186.623 4.715 4.715 0 0 1-.305.57 4.88 4.88 0 0 1-.412.505 5.026 5.026 0 0 1-.504.412 3.305 3.305 0 0 1-1.844.557",fillRule:"evenodd"})))}},8391:(e,t,a)=>{a.r(t),a.d(t,{default:()=>g});var 
l=a(7294),r=a(6010),n=a(9960),c=a(2263),m=a(7961),h=a(7462);const d={features:"features_t9lD",featureSvg:"featureSvg_GfXr"},i=[{title:l.createElement("a",{href:"https://makinarocks.ai/"},"MakinaRocks"),Svg:a(4002).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"Sponsored by MakinaRocks"),"\uc774 \ud504\ub85c\uc81d\ud2b8\ub294 MakinaRocks\uc758 \uc9c0\uc6d0\uc744 \ubc1b\uc544 \uc81c\uc791\ub418\uc5c8\uc2b5\ub2c8\ub2e4.")},{title:l.createElement("a",{href:"https://mlops-for-mle.github.io/tutorial"},"MLOps for MLE"),Svg:a(9722).Z,description:l.createElement(l.Fragment,null,l.createElement("p",null,"ML Engineer\ub97c \uc704\ud55c MLOps Release!"),"\uad6c\uae00\uc5d0\uc11c \uc81c\uc548\ud55c MLOps 0\ub2e8\uacc4\ub97c \uc9c1\uc811 \uad6c\ud604\ud558\uba70 MLOps \uac00 \ubb34\uc5c7\uc778\uc9c0 \uacf5\ubd80\ud560 \uc218 \uc788\ub294 \ud29c\ud1a0\ub9ac\uc5bc\uc744 \uc624\ud508\ud588\uc2b5\ub2c8\ub2e4!")}];function f(e){let{title:t,Svg:a,description:n}=e;return l.createElement("div",{className:(0,r.Z)("col col--6")},l.createElement("div",{className:"text--center"},l.createElement(a,{className:d.featureSvg,role:"img"})),l.createElement("div",{className:"text--center padding-horiz--md"},l.createElement("h3",null,t),l.createElement("p",null,n)))}function s(){return l.createElement("section",{className:d.features},l.createElement("div",{className:"container"},l.createElement("div",{className:"row"},i.map(((e,t)=>l.createElement(f,(0,h.Z)({key:t},e)))))))}const o={heroBanner:"heroBanner_qdFl",buttons:"buttons_AeoN"};function E(){const{siteConfig:e}=(0,c.Z)();return l.createElement("header",{className:(0,r.Z)("hero hero--primary",o.heroBanner)},l.createElement("div",{className:"container"},l.createElement("h1",{className:"hero__title"},e.title),l.createElement("p",{className:"hero__subtitle"},e.tagline),l.createElement("div",{className:o.buttons},l.createElement(n.Z,{className:"button button--secondary button--lg",to:"/docs/introduction/intro"},"Let's Start!"))))}function g(){const{siteConfig:e}=(0,c.Z)();return l.createElement(m.Z,{title:"MLOps for ALL",description:"Description will go into a meta tag in "},l.createElement(E,null),l.createElement("main",null,l.createElement(s,null)))}}}]); \ No newline at end of file diff --git a/en/assets/js/1f819a6a.96d064e5.js b/en/assets/js/1f819a6a.47059ed6.js similarity index 99% rename from en/assets/js/1f819a6a.96d064e5.js rename to en/assets/js/1f819a6a.47059ed6.js index 6baa11c1..fde315fc 100644 --- a/en/assets/js/1f819a6a.96d064e5.js +++ b/en/assets/js/1f819a6a.47059ed6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1714],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>f});var a=n(7294);function o(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function r(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function i(t){for(var e=1;e=0||(o[n]=t[n]);return o}(t,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(o[n]=t[n])}return o}var p=a.createContext({}),d=function(t){var e=a.useContext(p),n=e;return t&&(n="function"==typeof t?t(e):i(i({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(p.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var 
e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,o=t.mdxType,r=t.originalType,p=t.parentName,s=l(t,["components","mdxType","originalType","parentName"]),u=d(n),m=o,f=u["".concat(p,".").concat(m)]||u[m]||c[m]||r;return n?a.createElement(f,i(i({ref:e},s),{},{components:n})):a.createElement(f,i({ref:e},s))}));function f(t,e){var n=arguments,o=e&&e.mdxType;if("string"==typeof t||o){var r=n.length,i=new Array(r);i[0]=m;var l={};for(var p in e)hasOwnProperty.call(e,p)&&(l[p]=e[p]);l.originalType=t,l[u]="string"==typeof t?t:o,i[1]=l;for(var d=2;d{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>l,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const r={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/how-to-debug",id:"kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/en/docs/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/how-to-debug.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/en/docs/kubeflow/advanced-mlflow"},next:{title:"1. What is API Deployment?",permalink:"/en/docs/api-deployment/what-is-api-deployment"}},p={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...r}=t;return(0,o.kt)(u,(0,a.Z)({},s,r,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,o.kt)("p",null,"This page covers how to debug Kubeflow components."),(0,o.kt)("h2",{id:"failed-component"},"Failed Component"),(0,o.kt)("p",null,"We will modify a pipeline used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," in this page."),(0,o.kt)("p",null,"First, let's modify the pipeline so that the component fails."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n 
packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,o.kt)("p",null,"The modifications are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"load_iris_data")," component for loading data, ",(0,o.kt)("inlineCode",{parentName:"li"},"None")," was injected into the ",(0,o.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," feature."),(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," component, use the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na()")," function to remove rows with na values.")),(0,o.kt)("p",null,"Now let's upload and run the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","After running, if you press Run you will see that it has failed in the ",(0,o.kt)("inlineCode",{parentName:"p"},"Train from csv")," component."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-0.png",src:n(1870).Z,width:"2826",height:"1790"})),(0,o.kt)("p",null,"Click on the failed component and check the log to see the reason for the failure."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-2.png",src:n(7955).Z,width:"2826",height:"1796"})),(0,o.kt)("p",null,"If the log shows that the data count is 0 and the component did not run, there may be an issue with the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","Let's investigate what might be the problem."),(0,o.kt)("p",null,"First, click on the component and go to the Input/Output tab to download the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","You can click on the link indicated by the red square to download the data."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-5.png",src:n(2935).Z,width:"2690",height:"1740"})),(0,o.kt)("p",null,"Download both files to the same location. 
Then navigate to the specified path and check the downloaded files."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,o.kt)("p",null,"There are two files as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,o.kt)("p",null,"I will try to unzip it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,o.kt)("p",null,"And then run the component code using a Jupyter notebook.\n",(0,o.kt)("img",{alt:"debug-3.png",src:n(2451).Z,width:"2434",height:"1690"})),(0,o.kt)("p",null,"Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed.\nNow that we know the cause of the problem, we can modify the component to drop based on columns."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,o.kt)("p",null,"After modifying, upload the pipeline again and run it to confirm that it is running normally as follows."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-6.png",src:n(5833).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},1870:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},7955:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},2451:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},2935:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},5833:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1714],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>f});var a=n(7294);function o(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function r(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function i(t){for(var e=1;e=0||(o[n]=t[n]);return o}(t,e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(o[n]=t[n])}return o}var p=a.createContext({}),d=function(t){var e=a.useContext(p),n=e;return t&&(n="function"==typeof t?t(e):i(i({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(p.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,o=t.mdxType,r=t.originalType,p=t.parentName,s=l(t,["components","mdxType","originalType","parentName"]),u=d(n),m=o,f=u["".concat(p,".").concat(m)]||u[m]||c[m]||r;return n?a.createElement(f,i(i({ref:e},s),{},{components:n})):a.createElement(f,i({ref:e},s))}));function f(t,e){var 
n=arguments,o=e&&e.mdxType;if("string"==typeof t||o){var r=n.length,i=new Array(r);i[0]=m;var l={};for(var p in e)hasOwnProperty.call(e,p)&&(l[p]=e[p]);l.originalType=t,l[u]="string"==typeof t?t:o,i[1]=l;for(var d=2;d{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>l,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const r={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/how-to-debug",id:"kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/en/docs/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/how-to-debug.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/en/docs/kubeflow/advanced-mlflow"},next:{title:"1. What is API Deployment?",permalink:"/en/docs/api-deployment/what-is-api-deployment"}},p={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...r}=t;return(0,o.kt)(u,(0,a.Z)({},s,r,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,o.kt)("p",null,"This page covers how to debug Kubeflow components."),(0,o.kt)("h2",{id:"failed-component"},"Failed Component"),(0,o.kt)("p",null,"We will modify a pipeline used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," in this page."),(0,o.kt)("p",null,"First, let's modify the pipeline so that the component fails."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from 
sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,o.kt)("p",null,"The modifications are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"load_iris_data")," component for loading data, ",(0,o.kt)("inlineCode",{parentName:"li"},"None")," was injected into the ",(0,o.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," feature."),(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," component, use the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na()")," function to remove rows with na values.")),(0,o.kt)("p",null,"Now let's upload and run the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","After running, if you press Run you will see that it has failed in the ",(0,o.kt)("inlineCode",{parentName:"p"},"Train from csv")," component."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-0.png",src:n(1870).Z,width:"2826",height:"1790"})),(0,o.kt)("p",null,"Click on the failed component and check the log to see the reason for the failure."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-2.png",src:n(7955).Z,width:"2826",height:"1796"})),(0,o.kt)("p",null,"If the log shows that the data count is 0 and the component did not run, there may be an issue with the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","Let's investigate what might be the problem."),(0,o.kt)("p",null,"First, click on the component and go to the Input/Output tab to download the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","You can click on the link indicated by the red square to download the data."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-5.png",src:n(2935).Z,width:"2690",height:"1740"})),(0,o.kt)("p",null,"Download both files to the same location. 
Then navigate to the specified path and check the downloaded files."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,o.kt)("p",null,"There are two files as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,o.kt)("p",null,"I will try to unzip it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,o.kt)("p",null,"And then run the component code using a Jupyter notebook.\n",(0,o.kt)("img",{alt:"debug-3.png",src:n(2451).Z,width:"2434",height:"1690"})),(0,o.kt)("p",null,"Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed.\nNow that we know the cause of the problem, we can modify the component to drop based on columns."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,o.kt)("p",null,"After modifying, upload the pipeline again and run it to confirm that it is running normally as follows."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-6.png",src:n(5833).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},1870:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},7955:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},2451:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},2935:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},5833:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file diff --git a/en/assets/js/20a999a7.84992f27.js b/en/assets/js/20a999a7.41453705.js similarity index 99% rename from en/assets/js/20a999a7.84992f27.js rename to en/assets/js/20a999a7.41453705.js index 3f26d58b..4f1ad896 100644 --- a/en/assets/js/20a999a7.84992f27.js +++ b/en/assets/js/20a999a7.41453705.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9287],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function s(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var l=a.createContext({}),u=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},p=function(e){var t=u(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return 
a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,l=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,f=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(f,s(s({ref:t},p),{},{components:n})):a.createElement(f,s({ref:t},p))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,s=new Array(r);s[0]=m;var o={};for(var l in t)hasOwnProperty.call(t,l)&&(o[l]=t[l]);o.originalType=e,o[d]="string"==typeof e?e:i,s[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>u});var a=n(7462),i=(n(7294),n(3905));const r={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,o={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/en/docs/setup-components/install-components-kf"}},l={},u=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. Install NVIDIA-Docker.",id:"2-install-nvidia-docker",level:2},{value:"3. Setting NVIDIA-Docker as the Default Container Runtime",id:"3-setting-nvidia-docker-as-the-default-container-runtime",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],p={toc:u},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"For using GPU in Kubernetes and Kubeflow, the following tasks are required."),(0,i.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,i.kt)("p",null,"If the following screen is output when executing ",(0,i.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),", please omit this step."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... 
Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("p",null,"If the output of nvidia-smi is not as above, please install the nvidia driver that fits your installed GPU."),(0,i.kt)("p",null,"If you are not familiar with the installation of nvidia drivers, please install it through the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,i.kt)("h2",{id:"2-install-nvidia-docker"},"2. Install NVIDIA-Docker."),(0,i.kt)("p",null,"Let's install NVIDIA-Docker."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,i.kt)("p",null,"To check if it is installed correctly, we will run the docker container using the GPU."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,i.kt)("p",null,"If the following message appears, it means that the installation was successful: "),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("h2",{id:"3-setting-nvidia-docker-as-the-default-container-runtime"},"3. Setting NVIDIA-Docker as the Default Container Runtime"),(0,i.kt)("p",null,"By default, Kubernetes uses Docker-CE as the default container runtime. To use NVIDIA GPU within Docker containers, you need to configure NVIDIA-Docker as the container runtime and modify the default runtime for creating pods."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Open the ",(0,i.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," file and make the following modifications:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"After confirming the file changes, restart Docker."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the changes have been applied."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the installation was successful."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,i.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Create the nvidia-device-plugin daemonset."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nvidia-device-plugin pod is in the RUNNING state."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")))),(0,i.kt)("p",null,"You should see the following output:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n")),(0,i.kt)("ol",{start:3},(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nodes have been configured to have GPUs available."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the configuration was successful.",(0,i.kt)("br",{parentName:"p"}),"\n","(",(0,i.kt)("em",{parentName:"p"},"In the "),"MLOps for ALL* tutorial cluster, there are two GPUs, so the output is 2.\nIf the output shows the correct number of GPUs for your cluster, it is fine.)"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")),(0,i.kt)("p",{parentName:"li"},"If it is not configured, the GPU value will be displayed as ",(0,i.kt)("inlineCode",{parentName:"p"},""),"."))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9287],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function s(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var l=a.createContext({}),u=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},p=function(e){var t=u(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,l=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,f=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(f,s(s({ref:t},p),{},{components:n})):a.createElement(f,s({ref:t},p))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,s=new Array(r);s[0]=m;var o={};for(var l in t)hasOwnProperty.call(t,l)&&(o[l]=t[l]);o.originalType=e,o[d]="string"==typeof e?e:i,s[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>u});var 
a=n(7462),i=(n(7294),n(3905));const r={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,o={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/en/docs/setup-components/install-components-kf"}},l={},u=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. Install NVIDIA-Docker.",id:"2-install-nvidia-docker",level:2},{value:"3. Setting NVIDIA-Docker as the Default Container Runtime",id:"3-setting-nvidia-docker-as-the-default-container-runtime",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],p={toc:u},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"For using GPU in Kubernetes and Kubeflow, the following tasks are required."),(0,i.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,i.kt)("p",null,"If the following screen is output when executing ",(0,i.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),", please omit this step."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("p",null,"If the output of nvidia-smi is not as above, please install the nvidia driver that fits your installed GPU."),(0,i.kt)("p",null,"If you are not familiar with the installation of nvidia drivers, please install it through the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,i.kt)("h2",{id:"2-install-nvidia-docker"},"2. Install NVIDIA-Docker."),(0,i.kt)("p",null,"Let's install NVIDIA-Docker."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,i.kt)("p",null,"To check if it is installed correctly, we will run the docker container using the GPU."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,i.kt)("p",null,"If the following message appears, it means that the installation was successful: "),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("h2",{id:"3-setting-nvidia-docker-as-the-default-container-runtime"},"3. Setting NVIDIA-Docker as the Default Container Runtime"),(0,i.kt)("p",null,"By default, Kubernetes uses Docker-CE as the default container runtime. To use NVIDIA GPU within Docker containers, you need to configure NVIDIA-Docker as the container runtime and modify the default runtime for creating pods."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Open the ",(0,i.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," file and make the following modifications:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"After confirming the file changes, restart Docker."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the changes have been applied."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the installation was successful."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,i.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Create the nvidia-device-plugin daemonset."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nvidia-device-plugin pod is in the RUNNING state."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")))),(0,i.kt)("p",null,"You should see the following output:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n")),(0,i.kt)("ol",{start:3},(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nodes have been configured to have GPUs available."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the configuration was successful.",(0,i.kt)("br",{parentName:"p"}),"\n","(",(0,i.kt)("em",{parentName:"p"},"In the "),"MLOps for ALL* tutorial cluster, there are two GPUs, so the output is 2.\nIf the output shows the correct number of GPUs for your cluster, it is fine.)"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")),(0,i.kt)("p",{parentName:"li"},"If it is not configured, the GPU value will be displayed as ",(0,i.kt)("inlineCode",{parentName:"p"},""),"."))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/24605d3f.66b3435d.js b/en/assets/js/24605d3f.0d4164e1.js similarity index 99% rename from en/assets/js/24605d3f.66b3435d.js rename to en/assets/js/24605d3f.0d4164e1.js index 5e0bfd9b..0b33528a 100644 --- a/en/assets/js/24605d3f.66b3435d.js +++ b/en/assets/js/24605d3f.0d4164e1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4095],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},u),{},{components:n})):a.createElement(k,l({ref:t},u))}));function k(e,t){var 
n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,l[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/command",id:"prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/en/docs/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/command.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/en/docs/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/en/docs/prerequisites/docker/images"}},s={},c=[{value:"1. Normal installation confirmation",id:"1-normal-installation-confirmation",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-normal-installation-confirmation"},"1. Normal installation confirmation"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"If installed correctly, you should be able to see the following message."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," If you want to use without sudo, please refer to the following site."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. 
Docker Pull"),(0,r.kt)("p",null,"Docker pull is a command to download Docker images from a Docker image registry (a repository where Docker images are stored and shared)."),(0,r.kt)("p",null,"You can check the arguments available in docker pull using the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"If performed normally, it prints out as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"It can be seen here that docker pull takes two types of arguments. "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"In order to use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," options from help, they must be used before the NAME.\nLet's try and pull the ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," image directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"If interpreted correctly, the command means to pull an image with the tag ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," from an image named ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu"),"."),(0,r.kt)("p",null,"If performed successfully, it will produce an output similar to the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"If you perform the above command, you will download the image called 'ubuntu:18.04' from a registry named ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," to your laptop."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Note that ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"in the future, if you need to get a docker image from a certain ",(0,r.kt)("strong",{parentName:"li"},"private")," registry instead of docker.io or public docker hub, you can use ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," to point to the certain registry, then use ",(0,r.kt)("inlineCode",{parentName:"li"},"docker pull"),". Alternatively, you can set up an ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"insecure registry"),". 
"),(0,r.kt)("li",{parentName:"ul"},"Also note that ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save"))," and ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," commands are available to store and share docker images in the form of ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," file in an intranet.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. Docker images"),(0,r.kt)("p",null,"This is the command to list the Docker images that exist locally."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"The arguments available for use in docker images are as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"Let's try executing the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"If you install Docker and proceed with this practice, it will output something similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"If you use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," argument among the possible arguments, only the ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," will be printed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. 
Docker ps"),(0,r.kt)("p",null,"Command to output the list of currently running Docker containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"Use the following arguments can be used with 'docker ps':"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"Let's try running the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"If there are no currently running containers, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"If there is a container running, it will look similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. Docker run"),(0,r.kt)("p",null,"Command to run a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"The command to run docker run is as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"What we need to confirm here is that the docker run command takes three types of arguments. 
"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"Let's try running a docker container ourselves."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it"),": Combination of ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," options",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Runs the container and connects it to an interactive terminal"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name"),": Assigns a name to the container for easier identification instead of using the container ID"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),": Specifies the command to be executed in the container upon startup, where ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," opens a bash shell.")),(0,r.kt)("p",null,"After running the command, you can exit the container by using the ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," command."),(0,r.kt)("p",null,"When you enter the previously learned ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," command, the following output will be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"It was said that the container being executed was coming out, but for some reason the container that was just executed does not appear. The reason is that ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," shows the currently running containers by default. If you want to see the stopped containers too, you must give the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"Then the list of terminated containers will also be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. 
Docker exec"),(0,r.kt)("p",null,"Docker exec is a command that is used to issue commands or access the inside of a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"For example, let's try running the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"Here, the ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," option is a command that allows the Docker container to run in the background so that even if the connection ends to the container, it continues to run."),(0,r.kt)("p",null,"Use ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," to check if it is currently running."),(0,r.kt)("p",null,"It can be confirmed that it is running as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"Now let's connect to the running docker container through the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"This is the same as the previous ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command, allowing you to access the inside of the container."),(0,r.kt)("p",null,"You can exit using ",(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. Docker logs"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"I will have the following container be executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,'By using the above command, we have set up a busybox container named "test" as a Docker container in the background and printed the current time once every second.'),(0,r.kt)("p",null,"Now let's check the log with the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"If performed normally, it will be similar to below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"However, if used this way, you can only check the logs taken so far.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, you can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," option to keep watching and outputting."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"Command to stop a running Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,"Through ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", you can check the containers currently running, as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"Now let's try to stop Docker with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"After executing, type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," again."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"Comparing with the above result, you can see that the demo2 container has disappeared from the list of currently running containers.\nThe rest of the containers will also be stopped."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("p",null,"Docker rm: Command to delete a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"Docker containers are in a stopped state by default. That's why you can see stopped containers using ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),".\nBut why do we have to delete the stopped containers?",(0,r.kt)("br",{parentName:"p"}),"\n","Even when stopped, the data used in the Docker remains in the container.\nSo you can restart the container through restarting. 
But this process will use disk.\nSo\nin order to delete the containers that are not used at all, we should use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," command."),(0,r.kt)("p",null," First, let's check the current containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"There are three containers as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"Let's try to delete the 'demo3' container through the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,"The command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," reduced it to two lines as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"Delete the remaining containers as well."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. Docker rmi"),(0,r.kt)("p",null,"Command to delete a Docker image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"Use the following commands to check which images are currently on the local."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"The following is output."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"I will try to delete the ",(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"If you type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," again, the following will appear."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4095],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var 
n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},u),{},{components:n})):a.createElement(k,l({ref:t},u))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,l[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/command",id:"prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/en/docs/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/command.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/en/docs/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/en/docs/prerequisites/docker/images"}},s={},c=[{value:"1. Normal installation confirmation",id:"1-normal-installation-confirmation",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-normal-installation-confirmation"},"1. 
Normal installation confirmation"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"If installed correctly, you should be able to see the following message."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," If you want to use without sudo, please refer to the following site."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. Docker Pull"),(0,r.kt)("p",null,"Docker pull is a command to download Docker images from a Docker image registry (a repository where Docker images are stored and shared)."),(0,r.kt)("p",null,"You can check the arguments available in docker pull using the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"If performed normally, it prints out as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"It can be seen here that docker pull takes two types of arguments. 
"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"In order to use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," options from help, they must be used before the NAME.\nLet's try and pull the ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," image directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"If interpreted correctly, the command means to pull an image with the tag ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," from an image named ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu"),"."),(0,r.kt)("p",null,"If performed successfully, it will produce an output similar to the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"If you perform the above command, you will download the image called 'ubuntu:18.04' from a registry named ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," to your laptop."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Note that ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"in the future, if you need to get a docker image from a certain ",(0,r.kt)("strong",{parentName:"li"},"private")," registry instead of docker.io or public docker hub, you can use ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," to point to the certain registry, then use ",(0,r.kt)("inlineCode",{parentName:"li"},"docker pull"),". Alternatively, you can set up an ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"insecure registry"),". "),(0,r.kt)("li",{parentName:"ul"},"Also note that ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save"))," and ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," commands are available to store and share docker images in the form of ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," file in an intranet.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"This is the command to list the Docker images that exist locally."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"The arguments available for use in docker images are as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"Let's try executing the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"If you install Docker and proceed with this practice, it will output something similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"If you use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," argument among the possible arguments, only the ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," will be printed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. Docker ps"),(0,r.kt)("p",null,"Command to output the list of currently running Docker containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"Use the following arguments can be used with 'docker ps':"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"Let's try running the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"If there are no currently running containers, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"If there is a container running, it will look similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"Command to run a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"The command to run docker run is as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"What we need to confirm here is that the docker run command takes three types of arguments. "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"Let's try running a docker container ourselves."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it"),": Combination of ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," options",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Runs the container and connects it to an interactive terminal"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name"),": Assigns a name to the container for easier identification instead of using the container ID"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),": Specifies the command to be executed in the container upon startup, where ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," opens a bash shell.")),(0,r.kt)("p",null,"After running the command, you can exit the container by using the ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," command."),(0,r.kt)("p",null,"When you enter the previously learned ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," command, the following output will be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"It was said that the container being executed was coming out, but for some reason the container that was just executed does not appear. The reason is that ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," shows the currently running containers by default. If you want to see the stopped containers too, you must give the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"Then the list of terminated containers will also be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. 
Docker exec"),(0,r.kt)("p",null,"Docker exec is a command that is used to issue commands or access the inside of a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"For example, let's try running the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"Here, the ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," option is a command that allows the Docker container to run in the background so that even if the connection ends to the container, it continues to run."),(0,r.kt)("p",null,"Use ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," to check if it is currently running."),(0,r.kt)("p",null,"It can be confirmed that it is running as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"Now let's connect to the running docker container through the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"This is the same as the previous ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command, allowing you to access the inside of the container."),(0,r.kt)("p",null,"You can exit using ",(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. Docker logs"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"I will have the following container be executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,'By using the above command, we have set up a busybox container named "test" as a Docker container in the background and printed the current time once every second.'),(0,r.kt)("p",null,"Now let's check the log with the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"If performed normally, it will be similar to below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"However, if used this way, you can only check the logs taken so far.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, you can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," option to keep watching and outputting."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"Command to stop a running Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,"Through ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", you can check the containers currently running, as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"Now let's try to stop Docker with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"After executing, type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," again."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"Comparing with the above result, you can see that the demo2 container has disappeared from the list of currently running containers.\nThe rest of the containers will also be stopped."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("p",null,"Docker rm: Command to delete a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"Docker containers are in a stopped state by default. That's why you can see stopped containers using ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),".\nBut why do we have to delete the stopped containers?",(0,r.kt)("br",{parentName:"p"}),"\n","Even when stopped, the data used in the Docker remains in the container.\nSo you can restart the container through restarting. 
However, a stopped container still takes up disk space.\nSo, in order to delete containers that are no longer needed, we should use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," command."),(0,r.kt)("p",null,"First, let's check the current containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"There are three containers as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"Let's try to delete the 'demo3' container through the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,"Running ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," again now shows only two containers, as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"Delete the remaining containers as well."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. Docker rmi"),(0,r.kt)("p",null,"Command to delete a Docker image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"Use the following command to check which images are currently on the local machine."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"The following output appears."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"I will try to delete the ",(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"If you type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," again, the following will appear."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/2842b95f.9ae3b3ef.js b/en/assets/js/2842b95f.849081d7.js similarity index 98% rename from en/assets/js/2842b95f.9ae3b3ef.js rename to en/assets/js/2842b95f.849081d7.js index 01a3daec..4e41e663 100644 --- a/en/assets/js/2842b95f.9ae3b3ef.js +++ b/en/assets/js/2842b95f.849081d7.js
@@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[924],{3905:(e,i,t)=>{t.d(i,{Zo:()=>c,kt:()=>b});var n=t(7294);function l(e,i,t){return i in e?Object.defineProperty(e,i,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[i]=t,e}function p(e,i){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);i&&(n=n.filter((function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable}))),t.push.apply(t,n)}return t}function a(e){for(var i=1;i=0||(l[t]=e[t]);return l}(e,i);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var r=n.createContext({}),s=function(e){var i=n.useContext(r),t=i;return e&&(t="function"==typeof e?e(i):a(a({},i),e)),t},c=function(e){var i=s(e.components);return n.createElement(r.Provider,{value:i},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var i=e.children;return n.createElement(n.Fragment,{},i)}},g=n.forwardRef((function(e,i){var t=e.components,l=e.mdxType,p=e.originalType,r=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=s(t),g=l,b=u["".concat(r,".").concat(g)]||u[g]||d[g]||p;return t?n.createElement(b,a(a({ref:i},c),{},{components:t})):n.createElement(b,a({ref:i},c))}));function b(e,i){var t=arguments,l=i&&i.mdxType;if("string"==typeof e||l){var p=t.length,a=new Array(p);a[0]=g;var o={};for(var r in i)hasOwnProperty.call(i,r)&&(o[r]=i[r]);o.originalType=e,o[u]="string"==typeof e?e:l,a[1]=o;for(var s=2;s{t.r(i),t.d(i,{assets:()=>r,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>s});var n=t(7462),l=(t(7294),t(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,o={unversionedId:"kubeflow/basic-pipeline-upload",id:"version-1.0/kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/en/docs/1.0/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/en/docs/1.0/kubeflow/basic-run"}},r={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Click Pipelines Tab",id:"1-click-pipelines-tab",level:3},{value:"2. Click Upload Pipeline",id:"2-click-upload-pipeline",level:3},{value:"3. Click Choose file",id:"3-click-choose-file",level:3},{value:"4. Upload created yaml file",id:"4-upload-created-yaml-file",level:3},{value:"5. 
Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:i,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:i,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"Now, let's upload the pipeline we created directly to kubeflow.",(0,l.kt)("br",{parentName:"p"}),"\n","Pipeline uploads can be done through the kubeflow dashboard UI.\nUse the method used in ",(0,l.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-kf"},"Install Kubeflow")," to do port forwarding."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,"Access ",(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to open the dashboard."),(0,l.kt)("h3",{id:"1-click-pipelines-tab"},"1. Click Pipelines Tab"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:t(1962).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-click-upload-pipeline"},"2. Click Upload Pipeline"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:t(5675).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-click-choose-file"},"3. Click Choose file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:t(4964).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-upload-created-yaml-file"},"4. Upload created yaml file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:t(1777).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(1636).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"The uploaded pipeline allows you to manage versions through uploads. 
However, it serves the role of gathering pipelines with the same name rather than version management at the code level, such as Github.\nIn the example above, clicking on example_pipeline will bring up the following screen."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:t(5050).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click this screen shows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(1636).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click Upload Version, a screen appears where you can upload the pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:t(2485).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Now, upload your pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:t(6958).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Once uploaded, you can check the pipeline version as follows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:t(1912).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},1962:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},5675:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},4964:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},1777:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},1636:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},5050:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},2485:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},6958:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},1912:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[924],{3905:(e,i,t)=>{t.d(i,{Zo:()=>c,kt:()=>b});var n=t(7294);function l(e,i,t){return i in e?Object.defineProperty(e,i,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[i]=t,e}function p(e,i){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);i&&(n=n.filter((function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable}))),t.push.apply(t,n)}return t}function a(e){for(var i=1;i=0||(l[t]=e[t]);return l}(e,i);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var r=n.createContext({}),s=function(e){var i=n.useContext(r),t=i;return e&&(t="function"==typeof e?e(i):a(a({},i),e)),t},c=function(e){var i=s(e.components);return n.createElement(r.Provider,{value:i},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var i=e.children;return n.createElement(n.Fragment,{},i)}},g=n.forwardRef((function(e,i){var t=e.components,l=e.mdxType,p=e.originalType,r=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=s(t),g=l,b=u["".concat(r,".").concat(g)]||u[g]||d[g]||p;return t?n.createElement(b,a(a({ref:i},c),{},{components:t})):n.createElement(b,a({ref:i},c))}));function b(e,i){var t=arguments,l=i&&i.mdxType;if("string"==typeof e||l){var p=t.length,a=new Array(p);a[0]=g;var o={};for(var r in 
i)hasOwnProperty.call(i,r)&&(o[r]=i[r]);o.originalType=e,o[u]="string"==typeof e?e:l,a[1]=o;for(var s=2;s{t.r(i),t.d(i,{assets:()=>r,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>s});var n=t(7462),l=(t(7294),t(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,o={unversionedId:"kubeflow/basic-pipeline-upload",id:"version-1.0/kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-pipeline-upload.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/en/docs/1.0/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/en/docs/1.0/kubeflow/basic-run"}},r={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Click Pipelines Tab",id:"1-click-pipelines-tab",level:3},{value:"2. Click Upload Pipeline",id:"2-click-upload-pipeline",level:3},{value:"3. Click Choose file",id:"3-click-choose-file",level:3},{value:"4. Upload created yaml file",id:"4-upload-created-yaml-file",level:3},{value:"5. Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:i,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:i,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"Now, let's upload the pipeline we created directly to kubeflow.",(0,l.kt)("br",{parentName:"p"}),"\n","Pipeline uploads can be done through the kubeflow dashboard UI.\nUse the method used in ",(0,l.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-kf"},"Install Kubeflow")," to do port forwarding."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,"Access ",(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to open the dashboard."),(0,l.kt)("h3",{id:"1-click-pipelines-tab"},"1. Click Pipelines Tab"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:t(1962).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-click-upload-pipeline"},"2. Click Upload Pipeline"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:t(5675).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-click-choose-file"},"3. Click Choose file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:t(4964).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-upload-created-yaml-file"},"4. Upload created yaml file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:t(1777).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. 
Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(1636).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"The uploaded pipeline allows you to manage versions through uploads. However, it serves the role of gathering pipelines with the same name rather than version management at the code level, such as Github.\nIn the example above, clicking on example_pipeline will bring up the following screen."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:t(5050).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click this screen shows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(1636).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click Upload Version, a screen appears where you can upload the pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:t(2485).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Now, upload your pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:t(6958).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Once uploaded, you can check the pipeline version as follows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:t(1912).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},1962:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},5675:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},4964:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},1777:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},1636:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},5050:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},2485:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},6958:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},1912:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file diff --git a/en/assets/js/2b1aa4ae.f8687437.js b/en/assets/js/2b1aa4ae.f9171ba1.js similarity index 97% rename from en/assets/js/2b1aa4ae.f8687437.js rename to en/assets/js/2b1aa4ae.f9171ba1.js index 8980cc55..51b272eb 100644 --- a/en/assets/js/2b1aa4ae.f8687437.js +++ b/en/assets/js/2b1aa4ae.f9171ba1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4660],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return 
r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"kubeflow/basic-requirements",title:"3. Install Requirements",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/en/docs/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-requirements.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/en/docs/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. 
Python Virtual Environment")," and install the packages on the ",(0,o.kt)("strong",{parentName:"p"},"client node"),"."),(0,o.kt)("p",null,"The packages and versions required for the practice are as follows:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,"Activate the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"Python virtual environment")," created in the previous section."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"We are proceeding with the package installation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4660],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var p=r.createContext({}),s=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=s(e.components);return r.createElement(p.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=s(n),f=o,d=u["".concat(p,".").concat(f)]||u[f]||m[f]||a;return n?r.createElement(d,i(i({ref:t},c),{},{components:n})):r.createElement(d,i({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=f;var l={};for(var p in t)hasOwnProperty.call(t,p)&&(l[p]=t[p]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>s});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"kubeflow/basic-requirements",title:"3. 
Install Requirements",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/en/docs/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-requirements.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/en/docs/kubeflow/basic-component"}},p={},s=[],c={toc:s},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. Python Virtual Environment")," and install the packages on the ",(0,o.kt)("strong",{parentName:"p"},"client node"),"."),(0,o.kt)("p",null,"The packages and versions required for the practice are as follows:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,"Activate the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"Python virtual environment")," created in the previous section."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"We are proceeding with the package installation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/2b3f5e4d.a9ffc145.js b/en/assets/js/2b3f5e4d.38c43856.js similarity index 99% rename from en/assets/js/2b3f5e4d.a9ffc145.js rename to en/assets/js/2b3f5e4d.38c43856.js index 62a99a02..07406ed6 100644 --- a/en/assets/js/2b3f5e4d.a9ffc145.js +++ b/en/assets/js/2b3f5e4d.38c43856.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6005],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var i=a.createContext({}),u=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=u(e.components);return 
a.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),m=r,k=p["".concat(i,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},c),{},{components:n})):a.createElement(k,l({ref:t},c))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:r,l[1]=s;for(var u=2;u{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"version-1.0/setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},u=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],c={toc:u},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"On this page, we describe the components that need to be installed or configured on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,r.kt)("strong",{parentName:"p"},"Client")," prior to installing Kubernetes."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. 
To enable Port-Forwarding, the following packages need to be installed on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install apt packages for docker."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"add docker official GPG key."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"When installing Docker using the apt package manager, configure it to retrieve from the stable repository:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check the currently available Docker versions for installation:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"Verify if the version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," is listed among the output:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"If the addition was successful, the following output will be displayed:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install Docker version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal"),":"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check docker is installed."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")))),(0,r.kt)("p",null," If added successfully, it will output as follows:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. 
The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')),(0,r.kt)("ol",{start:7},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Add permissions to use Docker commands without the ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo")," keyword by executing the following commands:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"To verify that you can now use Docker commands without ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo"),", run the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command again:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message after executing the command, it means that the permissions have been successfully added:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"In order for kubelet to work properly, ",(0,r.kt)("strong",{parentName:"p"},"cluster")," nodes must turn off the virtual memory called swap. The following command turns off the swap.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(When using cluster and client on the same desktop, turning off swap memory may result in a slowdown in speed)")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl is a client tool used to make API requests to a Kubernetes cluster. 
It needs to be installed on the client node."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Download kubectl version v1.21.7 to the current folder:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Change the file permissions and move it to the appropriate location to make kubectl executable:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Verify that kubectl is installed correctly:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message, it means that kubectl is installed successfully:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, you can refer to the following resources:"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"Configuring Multiple kubeconfig on Your Machine")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"kubectx - Switch between Kubernetes contexts easily"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/"},"Install and Set Up kubectl on Linux"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6005],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var i=a.createContext({}),u=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=u(e.components);return a.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var 
n=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(n),m=r,k=p["".concat(i,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},c),{},{components:n})):a.createElement(k,l({ref:t},c))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:r,l[1]=s;for(var u=2;u{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>u});var a=n(7462),r=(n(7294),n(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"version-1.0/setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-prerequisite.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},u=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],c={toc:u},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"On this page, we describe the components that need to be installed or configured on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,r.kt)("strong",{parentName:"p"},"Client")," prior to installing Kubernetes."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. 
To enable Port-Forwarding, the following packages need to be installed on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install apt packages for docker."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"add docker official GPG key."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"When installing Docker using the apt package manager, configure it to retrieve from the stable repository:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check the currently available Docker versions for installation:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"Verify if the version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," is listed among the output:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"If the addition was successful, the following output will be displayed:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install Docker version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal"),":"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check docker is installed."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")))),(0,r.kt)("p",null," If added successfully, it will output as follows:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. 
The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')),(0,r.kt)("ol",{start:7},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Add permissions to use Docker commands without the ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo")," keyword by executing the following commands:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"To verify that you can now use Docker commands without ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo"),", run the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command again:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message after executing the command, it means that the permissions have been successfully added:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"In order for kubelet to work properly, ",(0,r.kt)("strong",{parentName:"p"},"cluster")," nodes must turn off the virtual memory called swap. The following command turns off the swap.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(When using cluster and client on the same desktop, turning off swap memory may result in a slowdown in speed)")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl is a client tool used to make API requests to a Kubernetes cluster. 
It needs to be installed on the client node."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Download kubectl version v1.21.7 to the current folder:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Change the file permissions and move it to the appropriate location to make kubectl executable:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Verify that kubectl is installed correctly:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message, it means that kubectl is installed successfully:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, you can refer to the following resources:"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"Configuring Multiple kubeconfig on Your Machine")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"kubectx - Switch between Kubernetes contexts easily"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/"},"Install and Set Up kubectl on Linux"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/2de7f827.ede8fe61.js b/en/assets/js/2de7f827.9e842577.js similarity index 99% rename from en/assets/js/2de7f827.ede8fe61.js rename to en/assets/js/2de7f827.9e842577.js index 9b5e8a1b..97d404e7 100644 --- a/en/assets/js/2de7f827.ede8fe61.js +++ b/en/assets/js/2de7f827.9e842577.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3222],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>g});var n=t(7294);function l(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function o(e){for(var a=1;a=0||(l[t]=e[t]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return 
e&&(t="function"==typeof e?e(a):o(o({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},m=n.forwardRef((function(e,a){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(t),m=l,g=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return t?n.createElement(g,o(o({ref:a},c),{},{components:t})):n.createElement(g,o({ref:a},c))}));function g(e,a){var t=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=m;var s={};for(var i in a)hasOwnProperty.call(a,i)&&(s[i]=a[i]);s.originalType=e,s[d]="string"==typeof e?e:l,o[1]=s;for(var p=2;p{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=t(7462),l=(t(7294),t(3905));const r={title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},o=void 0,s={unversionedId:"appendix/metallb",id:"appendix/metallb",title:"2. Install load balancer metallb for Bare Metal Cluster",description:"What is MetalLB?",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/en/docs/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/metallb.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Install Python virtual environment",permalink:"/en/docs/appendix/pyenv"},next:{title:"Further Readings",permalink:"/en/docs/further-readings/info"}},i={},p=[{value:"What is MetalLB?",id:"what-is-metallb",level:2},{value:"Installing MetalLB",id:"installing-metallb",level:2},{value:"Requirements",id:"requirements",level:2},{value:"MetalLB Installation",id:"metallb-installation",level:3},{value:"Preparation",id:"preparation",level:4},{value:"Installation - Manifest",id:"installation---manifest",level:3},{value:"1. Install MetalLB.",id:"1-install-metallb",level:4},{value:"2. Check installation.",id:"2-check-installation",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"Using MetalLB",id:"using-metallb",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},d="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(d,(0,n.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"what-is-metallb"},"What is MetalLB?"),(0,l.kt)("h2",{id:"installing-metallb"},"Installing MetalLB"),(0,l.kt)("p",null,"When using Kubernetes on cloud platforms such as AWS, GCP, and Azure, they provide their own load balancers. However, for on-premises clusters, an additional module needs to be installed to enable load balancing. 
",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB")," is an open-source project that provides a load balancer for bare metal environments."),(0,l.kt)("h2",{id:"requirements"},"Requirements"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Requirement"),(0,l.kt)("th",{parentName:"tr",align:null},"Version and Details"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"Version >= v1.13.0 without built-in load balancing")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"Compatible Network CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 addresses"),(0,l.kt)("td",{parentName:"tr",align:null},"Used for MetalLB deployment")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP mode"),(0,l.kt)("td",{parentName:"tr",align:null},"One or more routers that support BGP functionality")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"TCP/UDP port 7946 open between nodes"),(0,l.kt)("td",{parentName:"tr",align:null},"Memberlist requirement")))),(0,l.kt)("h3",{id:"metallb-installation"},"MetalLB Installation"),(0,l.kt)("h4",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"If you are using kube-proxy in IPVS mode, starting from Kubernetes v1.14.2, you need to enable strict ARP mode.",(0,l.kt)("br",{parentName:"p"}),"\n","By default, Kube-router enables strict ARP, so this feature is not required if you are using Kube-router as a service proxy.",(0,l.kt)("br",{parentName:"p"}),"\n","Before applying strict ARP mode, check the current mode."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"If strictARP: false is outputted, run the following to change it to strictARP: true.\n(If strictARP: true is already outputted, you do not need to execute the following command)."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"If performed normally, it will be output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"installation---manifest"},"Installation - Manifest"),(0,l.kt)("h4",{id:"1-install-metallb"},"1. 
Install MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-check-installation"},"2. Check installation."),(0,l.kt)("p",null,"Wait until both pods in the metallb-system namespace are Running."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"When everthing is Running, similar results will be output."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"The components of the manifest are as follows:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a deployment, responsible for assigning external IP addresses for load balancing."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a daemonset, responsible for configuring network communication to connect external traffic and services.")))),(0,l.kt)("p",null,"The service includes RBAC permissions which are necessary for the controller and speaker components to operate."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"Setting up the load balancing policy of MetalLB can be done by deploying a configmap containing the related configuration information."),(0,l.kt)("p",null,"There are two modes that can be configured in MetalLB:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 Mode")," "),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP Mode")," ")),(0,l.kt)("p",null,"Here we will proceed with Layer 2 mode."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"In the Layer 2 mode, it is enough to set only the range of IP addresses to be used simply.",(0,l.kt)("br",{parentName:"p"}),"\n","When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local network and provides the computer's MAC address to the client."),(0,l.kt)("p",null,"The following ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," file is the configuration for MetalLB to provide control over the IP range of 192.168.35.100 ~ 192.168.35.110, and to configure Layer 2 mode."),(0,l.kt)("p",null,"In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"Apply the above 
settings."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"If deployed normally, it will output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"using-metallb"},"Using MetalLB"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"First, before getting the load-balancing feature from MetalLB, check the current status by changing the type of the istio-ingressgateway service in the istio-system namespace to ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer")," to provide the Kubeflow Dashboard."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input a desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"If you check again, you will see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," to verify the following screen is output."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:t(4232).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of minio-service, which provides the Dashboard of minio, in the kubeflow namespace to LoadBalancer to receive the load balancing 
function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item. If you do not add, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," to confirm the following screen is printed. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:t(9706).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of mlflow-server-service service in the mlflow-system namespace that provides the mlflow Dashboard to LoadBalancer to receive load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Open the web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," to confirm the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:t(529).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"First, check the current status before changing the type of seldon-core-analytics-grafana service in the seldon-system namespace which provides Grafana's Dashboard to receive Load Balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"The type of the corresponding service is ClusterIP, and you can see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If not, an IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If you check again, you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Open the Web Browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," to confirm that the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:t(428).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},428:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},4232:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},9706:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},529:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3222],{3905:(e,a,t)=>{t.d(a,{Zo:()=>c,kt:()=>g});var n=t(7294);function l(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return 
Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function o(e){for(var a=1;a=0||(l[t]=e[t]);return l}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=n.createContext({}),p=function(e){var a=n.useContext(i),t=a;return e&&(t="function"==typeof e?e(a):o(o({},a),e)),t},c=function(e){var a=p(e.components);return n.createElement(i.Provider,{value:a},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},m=n.forwardRef((function(e,a){var t=e.components,l=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),d=p(t),m=l,g=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return t?n.createElement(g,o(o({ref:a},c),{},{components:t})):n.createElement(g,o({ref:a},c))}));function g(e,a){var t=arguments,l=a&&a.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=m;var s={};for(var i in a)hasOwnProperty.call(a,i)&&(s[i]=a[i]);s.originalType=e,s[d]="string"==typeof e?e:l,o[1]=s;for(var p=2;p{t.r(a),t.d(a,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=t(7462),l=(t(7294),t(3905));const r={title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},o=void 0,s={unversionedId:"appendix/metallb",id:"appendix/metallb",title:"2. Install load balancer metallb for Bare Metal Cluster",description:"What is MetalLB?",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/appendix/metallb.md",sourceDirName:"appendix",slug:"/appendix/metallb",permalink:"/en/docs/appendix/metallb",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/metallb.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Install load balancer metallb for Bare Metal Cluster",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"1. Install Python virtual environment",permalink:"/en/docs/appendix/pyenv"},next:{title:"Further Readings",permalink:"/en/docs/further-readings/info"}},i={},p=[{value:"What is MetalLB?",id:"what-is-metallb",level:2},{value:"Installing MetalLB",id:"installing-metallb",level:2},{value:"Requirements",id:"requirements",level:2},{value:"MetalLB Installation",id:"metallb-installation",level:3},{value:"Preparation",id:"preparation",level:4},{value:"Installation - Manifest",id:"installation---manifest",level:3},{value:"1. Install MetalLB.",id:"1-install-metallb",level:4},{value:"2. 
Check installation.",id:"2-check-installation",level:4},{value:"Configuration",id:"configuration",level:2},{value:"Layer 2 Configuration",id:"layer-2-configuration",level:3},{value:"metallb_config.yaml",id:"metallb_configyaml",level:4},{value:"Using MetalLB",id:"using-metallb",level:2},{value:"Kubeflow Dashboard",id:"kubeflow-dashboard",level:3},{value:"minio Dashboard",id:"minio-dashboard",level:3},{value:"mlflow Dashboard",id:"mlflow-dashboard",level:3},{value:"Grafana Dashboard",id:"grafana-dashboard",level:3}],c={toc:p},d="wrapper";function u(e){let{components:a,...r}=e;return(0,l.kt)(d,(0,n.Z)({},c,r,{components:a,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"what-is-metallb"},"What is MetalLB?"),(0,l.kt)("h2",{id:"installing-metallb"},"Installing MetalLB"),(0,l.kt)("p",null,"When using Kubernetes on cloud platforms such as AWS, GCP, and Azure, they provide their own load balancers. However, for on-premises clusters, an additional module needs to be installed to enable load balancing. ",(0,l.kt)("a",{parentName:"p",href:"https://metallb.universe.tf/"},"MetalLB")," is an open-source project that provides a load balancer for bare metal environments."),(0,l.kt)("h2",{id:"requirements"},"Requirements"),(0,l.kt)("table",null,(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Requirement"),(0,l.kt)("th",{parentName:"tr",align:null},"Version and Details"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,l.kt)("td",{parentName:"tr",align:null},"Version >= v1.13.0 without built-in load balancing")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},(0,l.kt)("a",{parentName:"td",href:"https://metallb.universe.tf/installation/network-addons/"},"Compatible Network CNI")),(0,l.kt)("td",{parentName:"tr",align:null},"Calico, Canal, Cilium, Flannel, Kube-ovn, Kube-router, Weave Net")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"IPv4 addresses"),(0,l.kt)("td",{parentName:"tr",align:null},"Used for MetalLB deployment")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"BGP mode"),(0,l.kt)("td",{parentName:"tr",align:null},"One or more routers that support BGP functionality")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"TCP/UDP port 7946 open between nodes"),(0,l.kt)("td",{parentName:"tr",align:null},"Memberlist requirement")))),(0,l.kt)("h3",{id:"metallb-installation"},"MetalLB Installation"),(0,l.kt)("h4",{id:"preparation"},"Preparation"),(0,l.kt)("p",null,"If you are using kube-proxy in IPVS mode, starting from Kubernetes v1.14.2, you need to enable strict ARP mode.",(0,l.kt)("br",{parentName:"p"}),"\n","By default, Kube-router enables strict ARP, so this feature is not required if you are using Kube-router as a service proxy.",(0,l.kt)("br",{parentName:"p"}),"\n","Before applying strict ARP mode, check the current mode."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"# see what changes would be made, returns nonzero returncode if different\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\ngrep strictARP\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"strictARP: false\n")),(0,l.kt)("p",null,"If strictARP: false is outputted, run the following to change it to strictARP: true.\n(If strictARP: true is already outputted, you do not need to execute 
the following command)."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'# actually apply the changes, returns nonzero returncode on errors only\nkubectl get configmap kube-proxy -n kube-system -o yaml | \\\nsed -e "s/strictARP: false/strictARP: true/" | \\\nkubectl apply -f - -n kube-system\n')),(0,l.kt)("p",null,"If performed normally, it will be output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Warning: resource configmaps/kube-proxy is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.\nconfigmap/kube-proxy configured\n")),(0,l.kt)("h3",{id:"installation---manifest"},"Installation - Manifest"),(0,l.kt)("h4",{id:"1-install-metallb"},"1. Install MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/namespace.yaml\nkubectl apply -f https://raw.githubusercontent.com/metallb/metallb/v0.11.0/manifests/metallb.yaml\n")),(0,l.kt)("h4",{id:"2-check-installation"},"2. Check installation."),(0,l.kt)("p",null,"Wait until both pods in the metallb-system namespace are Running."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n metallb-system\n")),(0,l.kt)("p",null,"When everthing is Running, similar results will be output."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncontroller-7dcc8764f4-8n92q 1/1 Running 1 1m\nspeaker-fnf8l 1/1 Running 1 1m\n")),(0,l.kt)("p",null,"The components of the manifest are as follows:"),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},"metallb-system/controller",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a deployment, responsible for assigning external IP addresses for load balancing."))),(0,l.kt)("li",{parentName:"ul"},"metallb-system/speaker",(0,l.kt)("ul",{parentName:"li"},(0,l.kt)("li",{parentName:"ul"},"Deployed as a daemonset, responsible for configuring network communication to connect external traffic and services.")))),(0,l.kt)("p",null,"The service includes RBAC permissions which are necessary for the controller and speaker components to operate."),(0,l.kt)("h2",{id:"configuration"},"Configuration"),(0,l.kt)("p",null,"Setting up the load balancing policy of MetalLB can be done by deploying a configmap containing the related configuration information."),(0,l.kt)("p",null,"There are two modes that can be configured in MetalLB:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/layer2/"},"Layer 2 Mode")," "),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("a",{parentName:"li",href:"https://metallb.universe.tf/concepts/bgp/"},"BGP Mode")," ")),(0,l.kt)("p",null,"Here we will proceed with Layer 2 mode."),(0,l.kt)("h3",{id:"layer-2-configuration"},"Layer 2 Configuration"),(0,l.kt)("p",null,"In the Layer 2 mode, it is enough to set only the range of IP addresses to be used simply.",(0,l.kt)("br",{parentName:"p"}),"\n","When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local 
network and provides the computer's MAC address to the client."),(0,l.kt)("p",null,"The following ",(0,l.kt)("inlineCode",{parentName:"p"},"metallb_config.yaml")," file is the configuration for MetalLB to provide control over the IP range of 192.168.35.100 ~ 192.168.35.110, and to configure Layer 2 mode."),(0,l.kt)("p",null,"In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node."),(0,l.kt)("h4",{id:"metallb_configyaml"},"metallb_config.yaml"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nkind: ConfigMap\nmetadata:\n namespace: metallb-system\n name: config\ndata:\n config: |\n address-pools:\n - name: default\n protocol: layer2\n addresses:\n - 192.168.35.100-192.168.35.110 # IP \ub300\uc5ed\ud3ed\n")),(0,l.kt)("p",null,"Apply the above settings."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"kubectl apply -f metallb_config.yaml \n")),(0,l.kt)("p",null,"If deployed normally, it will output as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-test"},"configmap/config created\n")),(0,l.kt)("h2",{id:"using-metallb"},"Using MetalLB"),(0,l.kt)("h3",{id:"kubeflow-dashboard"},"Kubeflow Dashboard"),(0,l.kt)("p",null,"First, before getting the load-balancing feature from MetalLB, check the current status by changing the type of the istio-ingressgateway service in the istio-system namespace to ",(0,l.kt)("inlineCode",{parentName:"p"},"LoadBalancer")," to provide the Kubeflow Dashboard."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway ClusterIP 10.103.72.5 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input a desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"spec:\n clusterIP: 10.103.72.5\n clusterIPs:\n - 10.103.72.5\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: status-port\n port: 15021\n protocol: TCP\n targetPort: 15021\n - name: http2\n port: 80\n protocol: TCP\n targetPort: 8080\n - name: https\n port: 443\n protocol: TCP\n targetPort: 8443\n - name: tcp\n port: 31400\n protocol: TCP\n targetPort: 31400\n - name: tls\n port: 15443\n protocol: TCP\n targetPort: 15443\n selector:\n app: istio-ingressgateway\n istio: ingressgateway\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.100 # Add IP\nstatus:\n loadBalancer: {}\n")),(0,l.kt)("p",null,"If you check again, you will see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.100"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/istio-ingressgateway -n istio-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nistio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.100"},"http://192.168.35.100")," to verify the following screen is output."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-istio-ingressgateway-setting.png",src:t(4232).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"minio-dashboard"},"minio Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of minio-service, which provides the Dashboard of minio, in the kubeflow namespace to LoadBalancer to receive the load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service ClusterIP 10.109.209.87 9000/TCP 5h14m\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item. If you do not add, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n kubectl.kubernetes.io/last-applied-configuration: |\n {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>\n creationTimestamp: "2022-01-05T08:44:23Z"\n labels:\n application-crd-id: kubeflow-pipelines\n name: minio-service\n namespace: kubeflow\n resourceVersion: "21120"\n uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48\nspec:\n clusterIP: 10.109.209.87\n clusterIPs:\n - 10.109.209.87\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: http\n port: 9000\n protocol: TCP\n targetPort: 9000\n selector:\n app: minio\n application-crd-id: kubeflow-pipelines\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.101 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.101"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/minio-service -n kubeflow\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nminio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m\n")),(0,l.kt)("p",null,"Open a web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.101:9000"},"http://192.168.35.101:9000")," to confirm the following screen is printed. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-minio-setting.png",src:t(9706).Z,width:"1811",height:"1046"})),(0,l.kt)("h3",{id:"mlflow-dashboard"},"mlflow Dashboard"),(0,l.kt)("p",null,"First, we check the current status before changing the type of mlflow-server-service service in the mlflow-system namespace that provides the mlflow Dashboard to LoadBalancer to receive load balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("p",null,"The type of this service is ClusterIP and you can confirm that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service ClusterIP 10.111.173.209 5000/TCP 4m50s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If you do not add it, the IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: mlflow-server\n meta.helm.sh/release-namespace: mlflow-system\n creationTimestamp: "2022-01-07T04:00:19Z"\n labels:\n app.kubernetes.io/managed-by: Helm\n name: mlflow-server-service\n namespace: mlflow-system\n resourceVersion: "276246"\n uid: e5d39fb7-ad98-47e7-b512-f9c673055356\nspec:\n clusterIP: 10.111.173.209\n clusterIPs:\n - 10.111.173.209\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - port: 5000\n protocol: TCP\n targetPort: 5000\n selector:\n app.kubernetes.io/name: mlflow-server\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.102 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If we check again, we can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.102"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/mlflow-server-service -n mlflow-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s\n")),(0,l.kt)("p",null,"Open the web browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.102:5000"},"http://192.168.35.102:5000")," to confirm the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-mlflow-setting.png",src:t(529).Z,width:"1922",height:"1082"})),(0,l.kt)("h3",{id:"grafana-dashboard"},"Grafana Dashboard"),(0,l.kt)("p",null,"First, check the current status before changing the type of seldon-core-analytics-grafana service in the seldon-system namespace which provides Grafana's Dashboard to receive Load Balancing function from MetalLB."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("p",null,"The type of the corresponding service is ClusterIP, and you can see that the External-IP value is 
",(0,l.kt)("inlineCode",{parentName:"p"},"none"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana ClusterIP 10.109.20.161 80/TCP 94s\n")),(0,l.kt)("p",null,"Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.",(0,l.kt)("br",{parentName:"p"}),"\n","If not, an IP address will be assigned sequentially from the IP address pool set above."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl edit svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: v1\nkind: Service\nmetadata:\n annotations:\n meta.helm.sh/release-name: seldon-core-analytics\n meta.helm.sh/release-namespace: seldon-system\n creationTimestamp: "2022-01-07T04:16:47Z"\n labels:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/managed-by: Helm\n app.kubernetes.io/name: grafana\n app.kubernetes.io/version: 7.0.3\n helm.sh/chart: grafana-5.1.4\n name: seldon-core-analytics-grafana\n namespace: seldon-system\n resourceVersion: "280605"\n uid: 75073b78-92ec-472c-b0d5-240038ea8fa5\nspec:\n clusterIP: 10.109.20.161\n clusterIPs:\n - 10.109.20.161\n ipFamilies:\n - IPv4\n ipFamilyPolicy: SingleStack\n ports:\n - name: service\n port: 80\n protocol: TCP\n targetPort: 3000\n selector:\n app.kubernetes.io/instance: seldon-core-analytics\n app.kubernetes.io/name: grafana\n sessionAffinity: None\n type: LoadBalancer # Change ClusterIP to LoadBalancer\n loadBalancerIP: 192.168.35.103 # Add IP\nstatus:\n loadBalancer: {}\n')),(0,l.kt)("p",null,"If you check again, you can see that the External-IP value is ",(0,l.kt)("inlineCode",{parentName:"p"},"192.168.35.103"),"."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get svc/seldon-core-analytics-grafana -n seldon-system\n")),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nseldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s\n")),(0,l.kt)("p",null,"Open the Web Browser and connect to ",(0,l.kt)("a",{parentName:"p",href:"http://192.168.35.103:80"},"http://192.168.35.103:80")," to confirm that the following screen is displayed."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"login-after-grafana-setting.png",src:t(428).Z,width:"1922",height:"1082"})))}u.isMDXComponent=!0},428:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-grafana-setting-95945b35a1316b2dbd1f0109991c0a0b.png"},4232:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-istio-ingressgateway-setting-3adfcf1bd5c4ddf45c54f4c4b5d4ceab.png"},9706:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-minio-setting-78fb86dafe3137ae3ecfbb49e2d7effb.png"},529:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/login-after-mlflow-setting-a4b0d197be47701209a6ef99612e89d6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/2ded1a41.470748c1.js b/en/assets/js/2ded1a41.9af81dc0.js similarity index 98% rename from en/assets/js/2ded1a41.470748c1.js rename to en/assets/js/2ded1a41.9af81dc0.js index 05bea739..84a287b0 100644 --- a/en/assets/js/2ded1a41.470748c1.js +++ b/en/assets/js/2ded1a41.9af81dc0.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6997],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),u=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},d=function(e){var t=u(e.components);return o.createElement(l.Provider,{value:t},e.children)},c="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=u(r),b=n,f=c["".concat(l,".").concat(b)]||c[b]||p[b]||a;return r?o.createElement(f,s(s({ref:t},d),{},{components:r})):o.createElement(f,s({ref:t},d))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[c]="string"==typeof e?e:n,s[1]=i;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"version-1.0/kubeflow-dashboard-guide/tensorboards",title:"3. Tensorboards",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes"}},l={},u=[],d={toc:u},c="wrapper";function p(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,o.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"We can see the following screen. 
"),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1964).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can be used for the purpose of storing data directly from a Kubeflow Pipeline run for visualization purposes."),(0,n.kt)("p",null,"You can refer to the ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"TensorBoard documentation")," for more information on using TensorBoard with Kubeflow Pipeline runs."),(0,n.kt)("p",null,"There are various ways to visualize the results of Kubeflow Pipeline runs, and in ",(0,n.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. Therefore, detailed explanations of the TensorBoards page will be omitted in this context."))}p.isMDXComponent=!0},7173:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1964:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6997],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),u=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},d=function(e){var t=u(e.components);return o.createElement(l.Provider,{value:t},e.children)},c="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=u(r),b=n,f=c["".concat(l,".").concat(b)]||c[b]||p[b]||a;return r?o.createElement(f,s(s({ref:t},d),{},{components:r})):o.createElement(f,s({ref:t},d))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[c]="string"==typeof e?e:n,s[1]=i;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"version-1.0/kubeflow-dashboard-guide/tensorboards",title:"3. 
Tensorboards",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes"}},l={},u=[],d={toc:u},c="wrapper";function p(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,o.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"We can see the following screen. "),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(1964).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can be used for the purpose of storing data directly from a Kubeflow Pipeline run for visualization purposes."),(0,n.kt)("p",null,"You can refer to the ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"TensorBoard documentation")," for more information on using TensorBoard with Kubeflow Pipeline runs."),(0,n.kt)("p",null,"There are various ways to visualize the results of Kubeflow Pipeline runs, and in ",(0,n.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. 
Therefore, detailed explanations of the TensorBoards page will be omitted in this context."))}p.isMDXComponent=!0},7173:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},1964:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file diff --git a/en/assets/js/2e8b9598.284472ba.js b/en/assets/js/2e8b9598.40432512.js similarity index 99% rename from en/assets/js/2e8b9598.284472ba.js rename to en/assets/js/2e8b9598.40432512.js index 6904d0df..b59468c1 100644 --- a/en/assets/js/2e8b9598.284472ba.js +++ b/en/assets/js/2e8b9598.40432512.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3552],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=a.createContext({}),c=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=c(n),m=o,h=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return n?a.createElement(h,l(l({ref:t},p),{},{components:n})):a.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,l=new Array(r);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[d]="string"==typeof e?e:o,l[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>c});var a=n(7462),o=(n(7294),n(3905));const r={title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/en/docs/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. 
Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/en/docs/setup-components/install-components-seldon"},next:{title:"1. Central Dashboard",permalink:"/en/docs/kubeflow-dashboard-guide/intro"}},i={},c=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3},{value:"References",id:"references",level:2}],p={toc:c},d="wrapper";function u(e){let{components:t,...r}=e;return(0,o.kt)(d,(0,a.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,o.kt)("p",null,"Prometheus and Grafana are tools for monitoring.",(0,o.kt)("br",{parentName:"p"}),"\n","For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.",(0,o.kt)("br",{parentName:"p"}),"\n","Among the many tools to efficiently perform such monitoring, ",(0,o.kt)("em",{parentName:"p"},"Everyone's MLOps")," will use open source Prometheus and Grafana."),(0,o.kt)("p",null,"For more information, please refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus Official Documentation")," and ",(0,o.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana Official Documentation"),"."),(0,o.kt)("p",null,"Prometheus is a tool to collect metrics from various targets, and Grafana is a tool to help visualize the gathered data. Although there is no dependency between them, they are often used together complementary to each other."),(0,o.kt)("p",null,"In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully."),(0,o.kt)("p",null,"We also install a dashboard to efficiently monitor the metrics of the SeldonDeployment created in Seldon-Core using Helm Chart version 1.12.0 from seldonio/seldon-core-analytics Helm Repository."),(0,o.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,o.kt)("p",null,"If the following message is output, it means that it has been added successfully."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is displayed, it means that the update was successful."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"helm-install"},"Helm Install"),(0,o.kt)("p",null,"Install version 1.12.0 of the seldon-core-analytics Helm Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,o.kt)("p",null,"The following message should be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,o.kt)("p",null,"Check to see if it was installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,o.kt)("p",null,"Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,o.kt)("h3",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Let's now check if we can connect to Grafana normally. First, we will port forward to connect to the client node."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),", then the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-install",src:n(4171).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"Enter the following connection information to connect."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email or username: ",(0,o.kt)("inlineCode",{parentName:"li"},"admin")),(0,o.kt)("li",{parentName:"ul"},"Password: ",(0,o.kt)("inlineCode",{parentName:"li"},"password"))),(0,o.kt)("p",null,"When you log in, the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-login",src:n(7985).Z,width:"3640",height:"2140"})),(0,o.kt)("p",null,"Click the dashboard icon on the left and click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Manage")," button."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard-click",src:n(3443).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"You can see that the basic Grafana dashboard is included. 
Click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," dashboard among them."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard",src:n(3323).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null," The Seldon Core API Dashboard is visible and can be confirmed with the following output."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"seldon-dashboard",src:n(2592).Z,width:"5016",height:"2826"})),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},3443:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},3323:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},4171:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7985:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},2592:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3552],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=a.createContext({}),c=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=c(n),m=o,h=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return n?a.createElement(h,l(l({ref:t},p),{},{components:n})):a.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,l=new Array(r);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[d]="string"==typeof e?e:o,l[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>c});var a=n(7462),o=(n(7294),n(3905));const r={title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"setup-components/install-components-pg",title:"4. 
Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/en/docs/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/en/docs/setup-components/install-components-seldon"},next:{title:"1. Central Dashboard",permalink:"/en/docs/kubeflow-dashboard-guide/intro"}},i={},c=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3},{value:"References",id:"references",level:2}],p={toc:c},d="wrapper";function u(e){let{components:t,...r}=e;return(0,o.kt)(d,(0,a.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,o.kt)("p",null,"Prometheus and Grafana are tools for monitoring.",(0,o.kt)("br",{parentName:"p"}),"\n","For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.",(0,o.kt)("br",{parentName:"p"}),"\n","Among the many tools to efficiently perform such monitoring, ",(0,o.kt)("em",{parentName:"p"},"Everyone's MLOps")," will use open source Prometheus and Grafana."),(0,o.kt)("p",null,"For more information, please refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus Official Documentation")," and ",(0,o.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana Official Documentation"),"."),(0,o.kt)("p",null,"Prometheus is a tool to collect metrics from various targets, and Grafana is a tool to help visualize the gathered data. 
Although there is no dependency between them, they are often used together complementary to each other."),(0,o.kt)("p",null,"In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully."),(0,o.kt)("p",null,"We also install a dashboard to efficiently monitor the metrics of the SeldonDeployment created in Seldon-Core using Helm Chart version 1.12.0 from seldonio/seldon-core-analytics Helm Repository."),(0,o.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,o.kt)("p",null,"If the following message is output, it means that it has been added successfully."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is displayed, it means that the update was successful."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"helm-install"},"Helm Install"),(0,o.kt)("p",null,"Install version 1.12.0 of the seldon-core-analytics Helm Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,o.kt)("p",null,"The following message should be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,o.kt)("p",null,"Check to see if it was installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,o.kt)("p",null,"Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,o.kt)("h3",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Let's now check if we can connect to Grafana normally. 
First, we will port forward to connect to the client node."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),", then the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-install",src:n(4171).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"Enter the following connection information to connect."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email or username: ",(0,o.kt)("inlineCode",{parentName:"li"},"admin")),(0,o.kt)("li",{parentName:"ul"},"Password: ",(0,o.kt)("inlineCode",{parentName:"li"},"password"))),(0,o.kt)("p",null,"When you log in, the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-login",src:n(7985).Z,width:"3640",height:"2140"})),(0,o.kt)("p",null,"Click the dashboard icon on the left and click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Manage")," button."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard-click",src:n(3443).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"You can see that the basic Grafana dashboard is included. Click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," dashboard among them."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard",src:n(3323).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null," The Seldon Core API Dashboard is visible and can be confirmed with the following output."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"seldon-dashboard",src:n(2592).Z,width:"5016",height:"2826"})),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},3443:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},3323:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},4171:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7985:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},2592:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file diff --git a/en/assets/js/302c3da2.aaebe2da.js b/en/assets/js/302c3da2.b05a62f9.js similarity index 99% rename from en/assets/js/302c3da2.aaebe2da.js rename to en/assets/js/302c3da2.b05a62f9.js index a6401520..47e69f84 100644 --- a/en/assets/js/302c3da2.aaebe2da.js +++ b/en/assets/js/302c3da2.b05a62f9.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7189],{3905:(e,t,l)=>{l.d(t,{Zo:()=>c,kt:()=>f});var n=l(7294);function a(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function r(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(a[l]=e[l]);return a}(e,t);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(a[l]=e[l])}return a}var i=n.createContext({}),p=function(e){var t=n.useContext(i),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var l=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),m=p(l),d=a,f=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return l?n.createElement(f,o(o({ref:t},c),{},{components:l})):n.createElement(f,o({ref:t},c))}));function f(e,t){var l=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=l.length,o=new Array(r);o[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[m]="string"==typeof e?e:a,o[1]=s;for(var p=2;p{l.r(t),l.d(t,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=l(7462),a=(l(7294),l(3905));const r={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"setup-components/install-components-mlflow",title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/en/docs/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/en/docs/setup-components/install-components-kf"},next:{title:"3. Seldon-Core",permalink:"/en/docs/setup-components/install-components-seldon"}},i={},p=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"Install PostgreSQL DB",id:"install-postgresql-db",level:3},{value:"Setup Minio",id:"setup-minio",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3}],c={toc:p},m="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,a.kt)("p",null,"MLflow is a popular open-source ML experiment management tool. 
In addition to ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"experiment management"),", MLflow provides functionalities for ML ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"model packaging"),", ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"deployment management"),", and ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"model storage"),"."),(0,a.kt)("p",null,"In ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will be using MLflow for experiment management purposes.",(0,a.kt)("br",{parentName:"p"}),"\n","o store the data managed by MLflow and provide a user interface, we will deploy the MLflow Tracking Server on the Kubernetes cluster."),(0,a.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"install-postgresql-db"},"Install PostgreSQL DB"),(0,a.kt)("p",null,"MLflow Tracking Server deploys a PostgreSQL DB for use as a Backend Store to a Kubernetes cluster."),(0,a.kt)("p",null,"First, create a namespace called ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,a.kt)("p",null,"If the following message is output, it means that it has been generated normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,a.kt)("p",null,"Create a Postgresql DB in the ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,a.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,a.kt)("p",null,"Wait until one postgresql related pod is running in the mlflow-system namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,a.kt)("p",null,"If it is output similar to the following, it has executed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,a.kt)("h3",{id:"setup-minio"},"Setup Minio"),(0,a.kt)("p",null,"We will utilize the Minio that was installed in the previous Kubeflow installation step.\nHowever, in order to separate it for kubeflow and mlflow purposes, we will create a mlflow-specific bucket.",(0,a.kt)("br",{parentName:"p"}),"\n","First, port-forward the minio-service to access Minio and create the bucket."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000")," to display the following 
screen."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"minio-install",src:l(4642).Z,width:"2906",height:"1946"})),(0,a.kt)("p",null,"Enter the following credentials to log in: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Username: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio")),(0,a.kt)("li",{parentName:"ul"},"Password: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,a.kt)("p",null,"Click the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"+"))," button on the right side bottom, then click ",(0,a.kt)("inlineCode",{parentName:"p"},"Create Bucket"),". "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create-bucket",src:l(1817).Z,width:"2902",height:"1950"})),(0,a.kt)("p",null,"Enter ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"Bucket Name")," to create the bucket."),(0,a.kt)("p",null,"If successfully created, you will see a bucket named ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," on the left.\n",(0,a.kt)("img",{alt:"mlflow-bucket",src:l(1047).Z,width:"2902",height:"1950"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,a.kt)("p",null,"If the following message is displayed, it means it has been added successfully."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,a.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,a.kt)("p",null,"If the following message is displayed, it means that the update has been successfully completed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,a.kt)("h3",{id:"helm-install"},"Helm Install"),(0,a.kt)("p",null,"Install mlflow-server Helm Chart version 0.2.0."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"The above Helm chart installs MLflow with the connection information for its backend store and artifacts store set to the default minio created during the Kubeflow installation process and the postgresql information created from the ",(0,a.kt)("a",{parentName:"li",href:"#postgresql-db-installation"},"PostgreSQL DB installation")," above.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use a separate DB or object storage, please refer to the ",(0,a.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo")," and set the values separately during helm install.")))),(0,a.kt)("p",null,"The following message should be displayed:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,a.kt)("p",null,"Check to see if it was installed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,a.kt)("p",null,"Wait until one mlflow-server related pod is running in the mlflow-system namespace.",(0,a.kt)("br",{parentName:"p"}),"\n","If it is output similar to the following, then it has been successfully executed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,a.kt)("h3",{id:"check-installation"},"Check installation"),(0,a.kt)("p",null,"Let's now check if we can successfully connect to the MLflow Server."),(0,a.kt)("p",null,"First, we will perform port forwarding in order to connect from the client node."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000")," and the following screen will be output."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlflow-install",src:l(2852).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},1817:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},4642:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},1047:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},2852:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7189],{3905:(e,t,l)=>{l.d(t,{Zo:()=>c,kt:()=>f});var n=l(7294);function a(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function r(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(a[l]=e[l]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(a[l]=e[l])}return a}var i=n.createContext({}),p=function(e){var t=n.useContext(i),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var l=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),m=p(l),d=a,f=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return l?n.createElement(f,o(o({ref:t},c),{},{components:l})):n.createElement(f,o({ref:t},c))}));function f(e,t){var l=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=l.length,o=new Array(r);o[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[m]="string"==typeof e?e:a,o[1]=s;for(var p=2;p{l.r(t),l.d(t,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=l(7462),a=(l(7294),l(3905));const r={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"setup-components/install-components-mlflow",title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/en/docs/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/en/docs/setup-components/install-components-kf"},next:{title:"3. 
Seldon-Core",permalink:"/en/docs/setup-components/install-components-seldon"}},i={},p=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"Install PostgreSQL DB",id:"install-postgresql-db",level:3},{value:"Setup Minio",id:"setup-minio",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3}],c={toc:p},m="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,a.kt)("p",null,"MLflow is a popular open-source ML experiment management tool. In addition to ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"experiment management"),", MLflow provides functionalities for ML ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"model packaging"),", ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"deployment management"),", and ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"model storage"),"."),(0,a.kt)("p",null,"In ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will be using MLflow for experiment management purposes.",(0,a.kt)("br",{parentName:"p"}),"\n","o store the data managed by MLflow and provide a user interface, we will deploy the MLflow Tracking Server on the Kubernetes cluster."),(0,a.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"install-postgresql-db"},"Install PostgreSQL DB"),(0,a.kt)("p",null,"MLflow Tracking Server deploys a PostgreSQL DB for use as a Backend Store to a Kubernetes cluster."),(0,a.kt)("p",null,"First, create a namespace called ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,a.kt)("p",null,"If the following message is output, it means that it has been generated normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,a.kt)("p",null,"Create a Postgresql DB in the ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,a.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,a.kt)("p",null,"Wait until one postgresql related pod is running in the mlflow-system namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,a.kt)("p",null,"If it is 
output similar to the following, it has executed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,a.kt)("h3",{id:"setup-minio"},"Setup Minio"),(0,a.kt)("p",null,"We will utilize the Minio that was installed in the previous Kubeflow installation step.\nHowever, in order to separate it for kubeflow and mlflow purposes, we will create a mlflow-specific bucket.",(0,a.kt)("br",{parentName:"p"}),"\n","First, port-forward the minio-service to access Minio and create the bucket."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000")," to display the following screen."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"minio-install",src:l(4642).Z,width:"2906",height:"1946"})),(0,a.kt)("p",null,"Enter the following credentials to log in: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Username: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio")),(0,a.kt)("li",{parentName:"ul"},"Password: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,a.kt)("p",null,"Click the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"+"))," button on the right side bottom, then click ",(0,a.kt)("inlineCode",{parentName:"p"},"Create Bucket"),". "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create-bucket",src:l(1817).Z,width:"2902",height:"1950"})),(0,a.kt)("p",null,"Enter ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"Bucket Name")," to create the bucket."),(0,a.kt)("p",null,"If successfully created, you will see a bucket named ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," on the left.\n",(0,a.kt)("img",{alt:"mlflow-bucket",src:l(1047).Z,width:"2902",height:"1950"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,a.kt)("p",null,"If the following message is displayed, it means it has been added successfully."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,a.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,a.kt)("p",null,"If the following message is displayed, it means that the update has been successfully completed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,a.kt)("h3",{id:"helm-install"},"Helm Install"),(0,a.kt)("p",null,"Install mlflow-server Helm Chart version 0.2.0."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"The above Helm chart installs MLflow with the connection information for its backend store and artifacts store set to the default minio created during the Kubeflow installation process and the postgresql information created from the ",(0,a.kt)("a",{parentName:"li",href:"#postgresql-db-installation"},"PostgreSQL DB installation")," above.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use a separate DB or object storage, please refer to the ",(0,a.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo")," and set the values separately during helm install.")))),(0,a.kt)("p",null,"The following message should be displayed:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,a.kt)("p",null,"Check to see if it was installed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,a.kt)("p",null,"Wait until one mlflow-server related pod is running in the mlflow-system namespace.",(0,a.kt)("br",{parentName:"p"}),"\n","If it is output similar to the following, then it has been successfully executed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,a.kt)("h3",{id:"check-installation"},"Check installation"),(0,a.kt)("p",null,"Let's now check if we can successfully connect to the MLflow Server."),(0,a.kt)("p",null,"First, we will perform port forwarding in order to connect from the client node."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000")," and the following screen will be output."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlflow-install",src:l(2852).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},1817:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},4642:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},1047:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},2852:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file diff --git a/en/assets/js/30bad7fd.969a1b95.js b/en/assets/js/30bad7fd.fdb76f80.js similarity index 99% rename from en/assets/js/30bad7fd.969a1b95.js rename to en/assets/js/30bad7fd.fdb76f80.js index 28178433..966a0cec 100644 --- a/en/assets/js/30bad7fd.969a1b95.js +++ b/en/assets/js/30bad7fd.fdb76f80.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1684],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var 
o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=o.createContext({}),l=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},h=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,a=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=l(n),h=r,m=d["".concat(c,".").concat(h)]||d[h]||p[h]||a;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=n.length,i=new Array(a);i[0]=h;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:r,i[1]=s;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>l});var o=n(7462),r=(n(7294),n(3905));const a={title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"introduction/why_kubernetes",id:"version-1.0/introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/en/docs/1.0/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/why_kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/en/docs/1.0/introduction/component"},next:{title:"1. 
Introduction",permalink:"/en/docs/1.0/setup-kubernetes/intro"}},c={},l=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:l},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,o.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,r.kt)("p",null,"When talking about MLOps, why is the word Kubernetes always heard together?"),(0,r.kt)("p",null,"To build a successful MLOps system, various components are needed as described in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"Components of MLOps"),", but to operate them organically at the infrastructure level, there are many issues to be solved. For example, simply running a large number of machine learning model requests in order, ensuring the same execution environment in other workspaces, and responding quickly when a deployed service has a failure."),(0,r.kt)("p",null,"The need for containers and container orchestration systems appears here. With the introduction of container orchestration systems such as Kubernetes, efficient isolation and management of execution environments can be achieved. By introducing a container orchestration system, it is possible to prevent situations such as ",(0,r.kt)("em",{parentName:"p"},"'Is anyone using cluster 1?', 'Who killed my process that was using GPU?', 'Who updated the x package on the cluster?")," when developing and deploying machine learning models while a few developers share a small number of clusters."),(0,r.kt)("h2",{id:"container"},"Container"),(0,r.kt)("p",null,"Microsoft defines a container as follows: What is a container then? In Microsoft, a container is defined as ",(0,r.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/en-us/overview/what-is-a-container/"},"follows"),"."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"Container: Standardized, portable packaging of an application's code, libraries, and configuration files")),(0,r.kt)("p",null,"But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python execution environment, package version, etc. To prevent this, the technology used to share and execute the entire dependent execution environment with the source code used in machine learning is called containerization technology. This packaged form is called a container image, and by sharing the container image, users can ensure the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the source code and requirements.txt file of the model, but the entire container image with the execution environment, you can avoid situations such as ",(0,r.kt)("em",{parentName:"p"},'"It works on my notebook, why not yours?"'),"."),(0,r.kt)("p",null,'One translation of the Korean sentence to English is: "One of the common misunderstandings that people who are new to containers often make is to assume that "container == Docker". Docker is not a concept that has the same meaning as containers; rather, it is a tool that provides features to make it easier and more flexible to use containers, such as launching containers and creating and sharing container images. 
In summary, container is a virtualization technology, and Docker is an implementation of virtualization technology.'),(0,r.kt)("p",null,"However, Docker has become the mainstream quickly due to its easy usability and high efficiency among various container virtualization tools, so when people think of containers, they often think of Docker automatically. There are various reasons why the container and Docker ecosystem have become the mainstream, but for technical reasons, I won't go into that detail since it is outside the scope of Everybody's MLOps."),(0,r.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,r.kt)("p",null,'Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.'),(0,r.kt)("p",null,"In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues."),(0,r.kt)("p",null,"For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations."),(0,r.kt)("p",null,"A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier."),(0,r.kt)("p",null,"How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal."),(0,r.kt)("p",null,"As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems."),(0,r.kt)("p",null,"According to the ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"survey")," released by CNCF in 2018, Kubernetes was already showing its prominence. The ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"survey")," published in 2019 indicates that 78% of respondents were using Kubernetes at a production level."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"k8s-graph",src:n(850).Z,width:"2048",height:"1317"})),(0,r.kt)("p",null,'The growth of the Kubernetes ecosystem can be attributed to various reasons. 
However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.'))}p.isMDXComponent=!0},850:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1684],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=o.createContext({}),l=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},h=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,a=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=l(n),h=r,m=d["".concat(c,".").concat(h)]||d[h]||p[h]||a;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=n.length,i=new Array(a);i[0]=h;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:r,i[1]=s;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>l});var o=n(7462),r=(n(7294),n(3905));const a={title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"introduction/why_kubernetes",id:"version-1.0/introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/en/docs/1.0/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/why_kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/en/docs/1.0/introduction/component"},next:{title:"1. 
Introduction",permalink:"/en/docs/1.0/setup-kubernetes/intro"}},c={},l=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:l},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,o.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,r.kt)("p",null,"When talking about MLOps, why is the word Kubernetes always heard together?"),(0,r.kt)("p",null,"To build a successful MLOps system, various components are needed as described in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"Components of MLOps"),", but to operate them organically at the infrastructure level, there are many issues to be solved. For example, simply running a large number of machine learning model requests in order, ensuring the same execution environment in other workspaces, and responding quickly when a deployed service has a failure."),(0,r.kt)("p",null,"The need for containers and container orchestration systems appears here. With the introduction of container orchestration systems such as Kubernetes, efficient isolation and management of execution environments can be achieved. By introducing a container orchestration system, it is possible to prevent situations such as ",(0,r.kt)("em",{parentName:"p"},"'Is anyone using cluster 1?', 'Who killed my process that was using GPU?', 'Who updated the x package on the cluster?")," when developing and deploying machine learning models while a few developers share a small number of clusters."),(0,r.kt)("h2",{id:"container"},"Container"),(0,r.kt)("p",null,"Microsoft defines a container as follows: What is a container then? In Microsoft, a container is defined as ",(0,r.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/en-us/overview/what-is-a-container/"},"follows"),"."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"Container: Standardized, portable packaging of an application's code, libraries, and configuration files")),(0,r.kt)("p",null,"But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python execution environment, package version, etc. To prevent this, the technology used to share and execute the entire dependent execution environment with the source code used in machine learning is called containerization technology. This packaged form is called a container image, and by sharing the container image, users can ensure the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the source code and requirements.txt file of the model, but the entire container image with the execution environment, you can avoid situations such as ",(0,r.kt)("em",{parentName:"p"},'"It works on my notebook, why not yours?"'),"."),(0,r.kt)("p",null,'One translation of the Korean sentence to English is: "One of the common misunderstandings that people who are new to containers often make is to assume that "container == Docker". Docker is not a concept that has the same meaning as containers; rather, it is a tool that provides features to make it easier and more flexible to use containers, such as launching containers and creating and sharing container images. 
In summary, container is a virtualization technology, and Docker is an implementation of virtualization technology.'),(0,r.kt)("p",null,"However, Docker has become the mainstream quickly due to its easy usability and high efficiency among various container virtualization tools, so when people think of containers, they often think of Docker automatically. There are various reasons why the container and Docker ecosystem have become the mainstream, but for technical reasons, I won't go into that detail since it is outside the scope of Everybody's MLOps."),(0,r.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,r.kt)("p",null,'Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.'),(0,r.kt)("p",null,"In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues."),(0,r.kt)("p",null,"For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations."),(0,r.kt)("p",null,"A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier."),(0,r.kt)("p",null,"How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal."),(0,r.kt)("p",null,"As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems."),(0,r.kt)("p",null,"According to the ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"survey")," released by CNCF in 2018, Kubernetes was already showing its prominence. The ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"survey")," published in 2019 indicates that 78% of respondents were using Kubernetes at a production level."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"k8s-graph",src:n(850).Z,width:"2048",height:"1317"})),(0,r.kt)("p",null,'The growth of the Kubernetes ecosystem can be attributed to various reasons. 
However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.'))}p.isMDXComponent=!0},850:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file diff --git a/en/assets/js/311b36d9.85a78d94.js b/en/assets/js/311b36d9.4c449b75.js similarity index 98% rename from en/assets/js/311b36d9.85a78d94.js rename to en/assets/js/311b36d9.4c449b75.js index 84024174..59316f8d 100644 --- a/en/assets/js/311b36d9.85a78d94.js +++ b/en/assets/js/311b36d9.4c449b75.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[697],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},p=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=c(r),k=a,m=u["".concat(s,".").concat(k)]||u[k]||d[k]||o;return r?n.createElement(m,l(l({ref:t},p),{},{components:r})):n.createElement(m,l({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,l=new Array(o);l[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:a,l[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const o={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/install",id:"version-1.0/prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/en/docs/1.0/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/install.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes 
?",permalink:"/en/docs/1.0/prerequisites/docker/introduction"}},s={},c=[{value:"Docker",id:"docker",level:2},{value:"Check Installation",id:"check-installation",level:2},{value:"Before diving in..",id:"before-diving-in",level:2}],p={toc:c},u="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"To practice Docker, you need to install Docker.",(0,a.kt)("br",{parentName:"p"}),"\n","The Docker installation varies depending on which OS you are using.",(0,a.kt)("br",{parentName:"p"}),"\n","Please refer to the official website for the Docker installation that fits your environment: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"check-installation"},"Check Installation"),(0,a.kt)("p",null,"Check installation requires an OS, terminal environment where ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," runs correctly."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"before-diving-in"},"Before diving in.."),(0,a.kt)("p",null,"It is possible that many metaphors and examples will be focused towards MLOps as they explain the necessary Docker usage to use MLOps."))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[697],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},p=function(e){var 
t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=c(r),k=a,m=u["".concat(s,".").concat(k)]||u[k]||d[k]||o;return r?n.createElement(m,l(l({ref:t},p),{},{components:r})):n.createElement(m,l({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,l=new Array(o);l[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:a,l[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const o={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/install",id:"version-1.0/prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/en/docs/1.0/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/install.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/1.0/prerequisites/docker/introduction"}},s={},c=[{value:"Docker",id:"docker",level:2},{value:"Check Installation",id:"check-installation",level:2},{value:"Before diving in..",id:"before-diving-in",level:2}],p={toc:c},u="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"To practice Docker, you need to install Docker.",(0,a.kt)("br",{parentName:"p"}),"\n","The Docker installation varies depending on which OS you are using.",(0,a.kt)("br",{parentName:"p"}),"\n","Please refer to the official website for the Docker installation that fits your environment: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"check-installation"},"Check Installation"),(0,a.kt)("p",null,"Check installation requires an OS, terminal environment where ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," runs correctly."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker 
Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"before-diving-in"},"Before diving in.."),(0,a.kt)("p",null,"It is possible that many metaphors and examples will be focused towards MLOps as they explain the necessary Docker usage to use MLOps."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/323a8b36.9e40bb6c.js b/en/assets/js/323a8b36.7d25fd1a.js similarity index 98% rename from en/assets/js/323a8b36.9e40bb6c.js rename to en/assets/js/323a8b36.7d25fd1a.js index ea17309b..e8ff8052 100644 --- a/en/assets/js/323a8b36.9e40bb6c.js +++ b/en/assets/js/323a8b36.7d25fd1a.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4051],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=a.createContext({}),c=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=c(n),m=o,h=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return n?a.createElement(h,l(l({ref:t},p),{},{components:n})):a.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,l=new Array(r);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[d]="string"==typeof e?e:o,l[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>c});var a=n(7462),o=(n(7294),n(3905));const r={title:"4. 
Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"version-1.0/setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/en/docs/1.0/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/en/docs/1.0/setup-components/install-components-seldon"},next:{title:"1. Central Dashboard",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro"}},i={},c=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3},{value:"References",id:"references",level:2}],p={toc:c},d="wrapper";function u(e){let{components:t,...r}=e;return(0,o.kt)(d,(0,a.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,o.kt)("p",null,"Prometheus and Grafana are tools for monitoring.",(0,o.kt)("br",{parentName:"p"}),"\n","For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.",(0,o.kt)("br",{parentName:"p"}),"\n","Among the many tools to efficiently perform such monitoring, ",(0,o.kt)("em",{parentName:"p"},"Everyone's MLOps")," will use open source Prometheus and Grafana."),(0,o.kt)("p",null,"For more information, please refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus Official Documentation")," and ",(0,o.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana Official Documentation"),"."),(0,o.kt)("p",null,"Prometheus is a tool to collect metrics from various targets, and Grafana is a tool to help visualize the gathered data. 
Although there is no dependency between them, they are often used together complementary to each other."),(0,o.kt)("p",null,"In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully."),(0,o.kt)("p",null,"We also install a dashboard to efficiently monitor the metrics of the SeldonDeployment created in Seldon-Core using Helm Chart version 1.12.0 from seldonio/seldon-core-analytics Helm Repository."),(0,o.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,o.kt)("p",null,"If the following message is output, it means that it has been added successfully."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is displayed, it means that the update was successful."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"helm-install"},"Helm Install"),(0,o.kt)("p",null,"Install version 1.12.0 of the seldon-core-analytics Helm Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,o.kt)("p",null,"The following message should be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,o.kt)("p",null,"Check to see if it was installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,o.kt)("p",null,"Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,o.kt)("h3",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Let's now check if we can connect to Grafana normally. 
First, we will port forward to connect to the client node."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),", then the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-install",src:n(7009).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"Enter the following connection information to connect."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email or username: ",(0,o.kt)("inlineCode",{parentName:"li"},"admin")),(0,o.kt)("li",{parentName:"ul"},"Password: ",(0,o.kt)("inlineCode",{parentName:"li"},"password"))),(0,o.kt)("p",null,"When you log in, the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-login",src:n(7525).Z,width:"3640",height:"2140"})),(0,o.kt)("p",null,"Click the dashboard icon on the left and click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Manage")," button."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard-click",src:n(8875).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"You can see that the basic Grafana dashboard is included. Click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," dashboard among them."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard",src:n(9890).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null," The Seldon Core API Dashboard is visible and can be confirmed with the following output."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"seldon-dashboard",src:n(7011).Z,width:"5016",height:"2826"})),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},8875:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},9890:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},7009:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7525:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},7011:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4051],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var i=a.createContext({}),c=function(e){var t=a.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=c(e.components);return a.createElement(i.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var 
t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,i=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),d=c(n),m=o,h=d["".concat(i,".").concat(m)]||d[m]||u[m]||r;return n?a.createElement(h,l(l({ref:t},p),{},{components:n})):a.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,l=new Array(r);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[d]="string"==typeof e?e:o,l[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>c});var a=n(7462),o=(n(7294),n(3905));const r={title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},l=void 0,s={unversionedId:"setup-components/install-components-pg",id:"version-1.0/setup-components/install-components-pg",title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-pg.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-pg",permalink:"/en/docs/1.0/setup-components/install-components-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Prometheus & Grafana",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Prometheus & Grafana",sidebar_position:4,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon-Core",permalink:"/en/docs/1.0/setup-components/install-components-seldon"},next:{title:"1. 
Central Dashboard",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro"}},i={},c=[{value:"Prometheus & Grafana",id:"prometheus--grafana",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3},{value:"References",id:"references",level:2}],p={toc:c},d="wrapper";function u(e){let{components:t,...r}=e;return(0,o.kt)(d,(0,a.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prometheus--grafana"},"Prometheus & Grafana"),(0,o.kt)("p",null,"Prometheus and Grafana are tools for monitoring.",(0,o.kt)("br",{parentName:"p"}),"\n","For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.",(0,o.kt)("br",{parentName:"p"}),"\n","Among the many tools to efficiently perform such monitoring, ",(0,o.kt)("em",{parentName:"p"},"Everyone's MLOps")," will use open source Prometheus and Grafana."),(0,o.kt)("p",null,"For more information, please refer to the ",(0,o.kt)("a",{parentName:"p",href:"https://prometheus.io/docs/introduction/overview/"},"Prometheus Official Documentation")," and ",(0,o.kt)("a",{parentName:"p",href:"https://grafana.com/docs/"},"Grafana Official Documentation"),"."),(0,o.kt)("p",null,"Prometheus is a tool to collect metrics from various targets, and Grafana is a tool to help visualize the gathered data. Although there is no dependency between them, they are often used together complementary to each other."),(0,o.kt)("p",null,"In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully."),(0,o.kt)("p",null,"We also install a dashboard to efficiently monitor the metrics of the SeldonDeployment created in Seldon-Core using Helm Chart version 1.12.0 from seldonio/seldon-core-analytics Helm Repository."),(0,o.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add seldonio https://storage.googleapis.com/seldon-charts\n")),(0,o.kt)("p",null,"If the following message is output, it means that it has been added successfully."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"seldonio" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is displayed, it means that the update was successful."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "seldonio" chart repository\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"helm-install"},"Helm Install"),(0,o.kt)("p",null,"Install version 1.12.0 of the seldon-core-analytics Helm Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core-analytics seldonio/seldon-core-analytics \\\n --namespace seldon-system \\\n --version 1.12.0\n")),(0,o.kt)("p",null,"The following message should be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\nNAME: seldon-core-analytics\nLAST DEPLOYED: Tue Dec 14 18:29:38 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\n")),(0,o.kt)("p",null,"Check to see if it was installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-core-analytics\n")),(0,o.kt)("p",null,"Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-core-analytics-grafana-657c956c88-ng8wn 2/2 Running 0 114s\nseldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s\nseldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s\nseldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s\nseldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s\nseldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s\n")),(0,o.kt)("h3",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Let's now check if we can connect to Grafana normally. First, we will port forward to connect to the client node."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8090"},"localhost:8090"),", then the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-install",src:n(7009).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"Enter the following connection information to connect."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email or username: ",(0,o.kt)("inlineCode",{parentName:"li"},"admin")),(0,o.kt)("li",{parentName:"ul"},"Password: ",(0,o.kt)("inlineCode",{parentName:"li"},"password"))),(0,o.kt)("p",null,"When you log in, the following screen will be displayed."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"grafana-login",src:n(7525).Z,width:"3640",height:"2140"})),(0,o.kt)("p",null,"Click the dashboard icon on the left and click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Manage")," button."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard-click",src:n(8875).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null,"You can see that the basic Grafana dashboard is included. 
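Note that the Seldon-related panels will stay empty until the SeldonDeployment actually receives prediction traffic. Below is a minimal sketch of generating such traffic from Python; the gateway address, namespace ("seldon-deploy"), and deployment name ("sklearn") are hypothetical placeholders for illustration only and are not values taken from this document.

```python
# Minimal sketch: send prediction requests so the Seldon dashboards have data.
# The gateway address, namespace ("seldon-deploy") and deployment name ("sklearn")
# are hypothetical placeholders -- replace them with your own SeldonDeployment.
import requests

GATEWAY = "http://localhost:8080"  # e.g. a port-forwarded ingress gateway
ENDPOINT = f"{GATEWAY}/seldon/seldon-deploy/sklearn/api/v1.0/predictions"
payload = {"data": {"ndarray": [[1.0, 2.0, 3.0, 4.0]]}}

for _ in range(100):  # repeated requests so the metrics show up in Grafana
    response = requests.post(ENDPOINT, json=payload)
    response.raise_for_status()

print(response.json())
```

Once a few requests have gone through, the prediction dashboards described below should begin to fill in.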
Click the ",(0,o.kt)("inlineCode",{parentName:"p"},"Prediction Analytics")," dashboard among them."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"dashboard",src:n(9890).Z,width:"5016",height:"2826"})),(0,o.kt)("p",null," The Seldon Core API Dashboard is visible and can be confirmed with the following output."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"seldon-dashboard",src:n(7011).Z,width:"5016",height:"2826"})),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/helm-charts/seldon-core-analytics"},"Seldon-Core-Analytics Helm Chart"))))}u.isMDXComponent=!0},8875:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-click-868bcd267717917295a8f9627d6c522e.png"},9890:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/dashboard-ee3d0192807699b2515d184ff00f426d.png"},7009:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-install-4ca59cc00fad5ee1a50d91f30ab89bb1.png"},7525:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/grafana-login-b91326a2a0082ffb560ad1b30d381091.png"},7011:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/seldon-dashboard-01eccd6a30aac640474edef01050d277.png"}}]); \ No newline at end of file diff --git a/en/assets/js/34be08f6.8130cb95.js b/en/assets/js/34be08f6.b19ae702.js similarity index 99% rename from en/assets/js/34be08f6.8130cb95.js rename to en/assets/js/34be08f6.b19ae702.js index fde6d390..b1181a42 100644 --- a/en/assets/js/34be08f6.8130cb95.js +++ b/en/assets/js/34be08f6.b19ae702.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4604],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>f});var o=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=o.createContext({}),u=function(e){var n=o.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},s=function(e){var n=u(e.components);return o.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},d=o.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),m=u(t),d=r,f=m["".concat(l,".").concat(d)]||m[d]||c[d]||a;return t?o.createElement(f,i(i({ref:n},s),{},{components:t})):o.createElement(f,i({ref:n},s))}));function f(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,i=new Array(a);i[0]=d;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var o=t(7462),r=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},i=void 0,p={unversionedId:"kubeflow/basic-component",id:"version-1.0/kubeflow/basic-component",title:"4. 
Component - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/en/docs/1.0/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/en/docs/1.0/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],s={toc:u},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,o.Z)({},s,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"In order to write a component, the following must be written: "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Writing Component Contents "),(0,r.kt)("li",{parentName:"ol"},"Writing Component Wrapper ")),(0,r.kt)("p",null,"Now, let's look at each process."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"Component Contents are no different from the Python code we commonly write.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, let's try writing a component that takes a number as input, prints it, and then returns it.\nWe can write it in Python code like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,r.kt)("p",null,"However, when this code is run, an error occurs and it does not work because the ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that should be printed is not defined. "),(0,r.kt)("p",null,"As we saw in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),", values like ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that are required in component content are defined in ",(0,r.kt)("strong",{parentName:"p"},"Config"),". In order to execute component content, the necessary Configs must be passed from the component wrapper."),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"Now we need to create a component wrapper to be able to pass the required Configs."),(0,r.kt)("p",null,"Without a separate Config, it will be like this when wrapped with a component wrapper."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,r.kt)("p",null,"Now we add the required Config for the content as an argument to the wrapper. 
However, it is not just writing the argument but also writing the type hint of the argument. When Kubeflow converts the pipeline into the Kubeflow format, it checks if the specified input and output types are matched in the connection between the components. If the format of the input required by the component does not match the output received from another component, the pipeline cannot be created."),(0,r.kt)("p",null,"Now we complete the component wrapper by writing down the argument, its type and the type to be returned as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("p",null,"In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"int"),(0,r.kt)("li",{parentName:"ul"},"float"),(0,r.kt)("li",{parentName:"ul"},"str")),(0,r.kt)("p",null,"If you want to return multiple values instead of a single value, you must use ",(0,r.kt)("inlineCode",{parentName:"p"},"collections.namedtuple"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the Kubeflow official documentation ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow Official Documentation"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func"),". 
This converted form can be imported as a function in Python and used in the pipeline."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,r.kt)("p",null,"If it is not possible to share with Python code, you can share components with a YAML file and use them.\nTo do this, first convert the component to a YAML file and then use it in the pipeline with ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file"),"."),(0,r.kt)("p",null,"First, let's explain the process of converting the written component to a YAML file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,r.kt)("p",null,"If you run the Python code you wrote, a file called ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," will be created. When you check the file, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,r.kt)("p",null,"Now the generated file can be shared and used in the pipeline as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,r.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,r.kt)("p",null,"In Kubeflow, the execution order of components is as 
follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull "),": Pull the image containing the execution environment information of the defined component."),(0,r.kt)("li",{parentName:"ol"},"Run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"),": Execute the component's content within the pulled image.")),(0,r.kt)("p",null,"Taking ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," as an example, the default image in ",(0,r.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7"),", so the component's content will be executed based on that image."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,r.kt)("h2",{id:"references"},"References:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4604],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>f});var o=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=o.createContext({}),u=function(e){var n=o.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},s=function(e){var n=u(e.components);return o.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},d=o.forwardRef((function(e,n){var t=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),m=u(t),d=r,f=m["".concat(l,".").concat(d)]||m[d]||c[d]||a;return t?o.createElement(f,i(i({ref:n},s),{},{components:t})):o.createElement(f,i({ref:n},s))}));function f(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var a=t.length,i=new Array(a);i[0]=d;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,i[1]=p;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var o=t(7462),r=(t(7294),t(3905));const a={title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},i=void 0,p={unversionedId:"kubeflow/basic-component",id:"version-1.0/kubeflow/basic-component",title:"4. 
Component - Write",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-component",permalink:"/en/docs/1.0/kubeflow/basic-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Component - Write",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Install Requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements"},next:{title:"5. Pipeline - Write",permalink:"/en/docs/1.0/kubeflow/basic-pipeline"}},l={},u=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Share component with yaml file",id:"share-component-with-yaml-file",level:3},{value:"How Kubeflow executes component",id:"how-kubeflow-executes-component",level:2},{value:"References:",id:"references",level:2}],s={toc:u},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,o.Z)({},s,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component"},"Component"),(0,r.kt)("p",null,"In order to write a component, the following must be written: "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Writing Component Contents "),(0,r.kt)("li",{parentName:"ol"},"Writing Component Wrapper ")),(0,r.kt)("p",null,"Now, let's look at each process."),(0,r.kt)("h2",{id:"component-contents"},"Component Contents"),(0,r.kt)("p",null,"Component Contents are no different from the Python code we commonly write.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, let's try writing a component that takes a number as input, prints it, and then returns it.\nWe can write it in Python code like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"print(number)\n")),(0,r.kt)("p",null,"However, when this code is run, an error occurs and it does not work because the ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that should be printed is not defined. "),(0,r.kt)("p",null,"As we saw in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts"},"Kubeflow Concepts"),", values like ",(0,r.kt)("inlineCode",{parentName:"p"},"number")," that are required in component content are defined in ",(0,r.kt)("strong",{parentName:"p"},"Config"),". In order to execute component content, the necessary Configs must be passed from the component wrapper."),(0,r.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,r.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,r.kt)("p",null,"Now we need to create a component wrapper to be able to pass the required Configs."),(0,r.kt)("p",null,"Without a separate Config, it will be like this when wrapped with a component wrapper."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number():\n print(number)\n return number\n")),(0,r.kt)("p",null,"Now we add the required Config for the content as an argument to the wrapper. 
However, it is not just writing the argument but also writing the type hint of the argument. When Kubeflow converts the pipeline into the Kubeflow format, it checks if the specified input and output types are matched in the connection between the components. If the format of the input required by the component does not match the output received from another component, the pipeline cannot be created."),(0,r.kt)("p",null,"Now we complete the component wrapper by writing down the argument, its type and the type to be returned as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"def print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("p",null,"In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"int"),(0,r.kt)("li",{parentName:"ul"},"float"),(0,r.kt)("li",{parentName:"ul"},"str")),(0,r.kt)("p",null,"If you want to return multiple values instead of a single value, you must use ",(0,r.kt)("inlineCode",{parentName:"p"},"collections.namedtuple"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the Kubeflow official documentation ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#passing-parameters-by-value"},"Kubeflow Official Documentation"),".",(0,r.kt)("br",{parentName:"p"}),"\n","For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from typing import NamedTuple\n\n\ndef divide_and_return_number(\n number: int,\n) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):\n from collections import namedtuple\n\n quotient, remainder = divmod(number, 2)\n print("quotient is", quotient)\n print("remainder is", remainder)\n\n divide_outputs = namedtuple(\n "DivideOutputs",\n [\n "quotient",\n "remainder",\n ],\n )\n return divide_outputs(quotient, remainder)\n')),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.create_component_from_func"),". 
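Looking ahead to how such a converted component is eventually consumed, the following is a minimal sketch (assuming the kfp v1-style SDK used throughout this page) of wiring a converted component into a pipeline and compiling it; the pipeline name and output file name are illustrative assumptions, not values from this document. The conversion call itself is shown right after this sketch, and pipeline writing is covered in detail in 5. Pipeline - Write.

```python
# Minimal sketch (kfp v1-style SDK assumed): wiring the converted component
# into a pipeline and compiling it. The pipeline name and output file name
# are illustrative placeholders.
import kfp
from kfp import dsl
from kfp.components import create_component_from_func


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@dsl.pipeline(name="example-pipeline")
def example_pipeline(number: int):
    # Calling the converted component creates a pipeline task; its result
    # is available to downstream components via .output.
    print_and_return_number(number)


if __name__ == "__main__":
    # Compile to an Argo workflow file that can be uploaded to Kubeflow Pipelines.
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")
```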
This converted form can be imported as a function in Python and used in the pipeline."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},"from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n")),(0,r.kt)("h3",{id:"share-component-with-yaml-file"},"Share component with yaml file"),(0,r.kt)("p",null,"If it is not possible to share with Python code, you can share components with a YAML file and use them.\nTo do this, first convert the component to a YAML file and then use it in the pipeline with ",(0,r.kt)("inlineCode",{parentName:"p"},"kfp.components.load_component_from_file"),"."),(0,r.kt)("p",null,"First, let's explain the process of converting the written component to a YAML file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import create_component_from_func\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\nif __name__ == "__main__":\n print_and_return_number.component_spec.save("print_and_return_number.yaml")\n')),(0,r.kt)("p",null,"If you run the Python code you wrote, a file called ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," will be created. When you check the file, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Print and return number\ninputs:\n- {name: number, type: Integer}\noutputs:\n- {name: Output, type: Integer}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n args:\n - --number\n - {inputValue: number}\n - \'----output-paths\'\n - {outputPath: Output}\n')),(0,r.kt)("p",null,"Now the generated file can be shared and used in the pipeline as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import load_component_from_file\n\nprint_and_return_number = load_component_from_file("print_and_return_number.yaml")\n')),(0,r.kt)("h2",{id:"how-kubeflow-executes-component"},"How Kubeflow executes component"),(0,r.kt)("p",null,"In Kubeflow, the execution order of components is as 
follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull "),": Pull the image containing the execution environment information of the defined component."),(0,r.kt)("li",{parentName:"ol"},"Run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"),": Execute the component's content within the pulled image.")),(0,r.kt)("p",null,"Taking ",(0,r.kt)("inlineCode",{parentName:"p"},"print_and_return_number.yaml")," as an example, the default image in ",(0,r.kt)("inlineCode",{parentName:"p"},"@create_component_from_func")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7"),", so the component's content will be executed based on that image."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"print(number)"))),(0,r.kt)("h2",{id:"references"},"References:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/python-function-components/#getting-started-with-python-function-based-components"},"Getting Started With Python function based components"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/3d1e1011.1a6a061e.js b/en/assets/js/3d1e1011.40bbebc6.js similarity index 84% rename from en/assets/js/3d1e1011.1a6a061e.js rename to en/assets/js/3d1e1011.40bbebc6.js index 396f66df..9878bee6 100644 --- a/en/assets/js/3d1e1011.1a6a061e.js +++ b/en/assets/js/3d1e1011.40bbebc6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7306],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),p=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=p(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=p(n),m=a,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||o;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,l[1]=s;for(var p=2;p{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var r=n(7462),a=(n(7294),n(3905));const o={title:"3. 
Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,s={unversionedId:"api-deployment/seldon-pg",id:"version-1.0/api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/en/docs/1.0/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/1.0/api-deployment/seldon-iris"},next:{title:"4. Seldon Fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields"}},i={},p=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"Dashboard",id:"dashboard",level:3},{value:"Request API",id:"request-api",level:3}],d={toc:p},c="wrapper";function u(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,r.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,a.kt)("p",null,"Now, let's perform repeated API requests with the SeldonDeployment we created on the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-iris"},"previous page")," and check if the dashboard changes."),(0,a.kt)("h3",{id:"dashboard"},"Dashboard"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-pg"},"Forward the dashboard created earlier"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,a.kt)("h3",{id:"request-api"},"Request API"),(0,a.kt)("p",null,"Request ",(0,a.kt)("strong",{parentName:"p"},"repeated")," to the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-iris#using-cli"},"previously created Seldon Deployment"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,a.kt)("p",null,"Furthermore, when checking the Grafana dashboard, you can observe that the Global Request Rate increases momentarily from ",(0,a.kt)("inlineCode",{parentName:"p"},"0 ops"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"repeat-raise.png",src:n(3348).Z,width:"5016",height:"2826"})),(0,a.kt)("p",null,"This confirms that Prometheus and Grafana have been successfully installed and configured."))}u.isMDXComponent=!0},3348:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7306],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function a(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function s(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),p=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},d=function(e){var t=p(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(n),m=a,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||o;return n?r.createElement(f,s(s({ref:t},d),{},{components:n})):r.createElement(f,s({ref:t},d))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,s=new Array(o);s[0]=m;var l={};for(var i in t)hasOwnProperty.call(t,i)&&(l[i]=t[i]);l.originalType=e,l[c]="string"==typeof e?e:a,s[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>s,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var r=n(7462),a=(n(7294),n(3905));const o={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},s=void 0,l={unversionedId:"api-deployment/seldon-pg",id:"version-1.0/api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/en/docs/1.0/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-pg.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/1.0/api-deployment/seldon-iris"},next:{title:"4. 
Seldon Fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields"}},i={},p=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"Dashboard",id:"dashboard",level:3},{value:"Request API",id:"request-api",level:3}],d={toc:p},c="wrapper";function u(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,r.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,a.kt)("p",null,"Now, let's perform repeated API requests with the SeldonDeployment we created on the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-iris"},"previous page")," and check if the dashboard changes."),(0,a.kt)("h3",{id:"dashboard"},"Dashboard"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-pg"},"Forward the dashboard created earlier"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,a.kt)("h3",{id:"request-api"},"Request API"),(0,a.kt)("p",null,"Request ",(0,a.kt)("strong",{parentName:"p"},"repeated")," to the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-iris#using-cli"},"previously created Seldon Deployment"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,a.kt)("p",null,"Furthermore, when checking the Grafana dashboard, you can observe that the Global Request Rate increases momentarily from ",(0,a.kt)("inlineCode",{parentName:"p"},"0 ops"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"repeat-raise.png",src:n(3348).Z,width:"5016",height:"2826"})),(0,a.kt)("p",null,"This confirms that Prometheus and Grafana have been successfully installed and configured."))}u.isMDXComponent=!0},3348:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file diff --git a/en/assets/js/404a71d4.b23225a2.js b/en/assets/js/404a71d4.ce43d136.js similarity index 99% rename from en/assets/js/404a71d4.b23225a2.js rename to en/assets/js/404a71d4.ce43d136.js index d9f8cf8a..3f10b1f6 100644 --- a/en/assets/js/404a71d4.b23225a2.js +++ b/en/assets/js/404a71d4.ce43d136.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5036],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>_});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),m=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=m(e.components);return a.createElement(s.Provider,{value:n},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var 
t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),p=m(t),u=l,_=p["".concat(s,".").concat(u)]||p[u]||c[u]||r;return t?a.createElement(_,o(o({ref:n},d),{},{components:t})):a.createElement(_,o({ref:n},d))}));function _(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:l,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-children",id:"api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/en/docs/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-children.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/en/docs/api-deployment/seldon-mlflow"},next:{title:"1. Install Python virtual environment",permalink:"/en/docs/appendix/pyenv"}},s={},m=[],d={toc:m},p="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(p,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models. "),(0,l.kt)("p",null,"First, we will create a pipeline that creates two models. 
We will add a StandardScaler to the SVC model we used before and store it."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef 
upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"If you upload the pipeline, it will look like this.\n",(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(1092).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"When you check the MLflow dashboard, two models will be generated, as shown below. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(536).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"After checking the run_id of each one, define the SeldonDeployment spec as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"Two models have been created so each model's initContainer and container must be defined. This field takes input as an array and the order does not matter. The order in which the models are executed is defined in the graph."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"The operation of the graph is to convert the initial value received into a predefined predict_method and then pass it to the model defined as children. 
In this case, the data is passed from scaler -> svc."),(0,l.kt)("p",null,"Now let's create the above specifications in a yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"Create an API through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"If properly performed, it will be outputted as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"Check to see if it has been generated normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"If it is created normally, a similar pod will be created."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},1092:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},536:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5036],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>_});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return 
Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),m=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=m(e.components);return a.createElement(s.Provider,{value:n},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),p=m(t),u=l,_=p["".concat(s,".").concat(u)]||p[u]||c[u]||r;return t?a.createElement(_,o(o({ref:n},d),{},{components:t})):a.createElement(_,o({ref:n},d))}));function _(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:l,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-children",id:"api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/en/docs/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-children.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/en/docs/api-deployment/seldon-mlflow"},next:{title:"1. Install Python virtual environment",permalink:"/en/docs/appendix/pyenv"}},s={},m=[],d={toc:m},p="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(p,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models. "),(0,l.kt)("p",null,"First, we will create a pipeline that creates two models. 
We will add a StandardScaler to the SVC model we used before and store it."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef 
upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"If you upload the pipeline, it will look like this.\n",(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(1092).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"When you check the MLflow dashboard, two models will be generated, as shown below. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(536).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"After checking the run_id of each one, define the SeldonDeployment spec as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"Two models have been created so each model's initContainer and container must be defined. This field takes input as an array and the order does not matter. The order in which the models are executed is defined in the graph."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"The operation of the graph is to convert the initial value received into a predefined predict_method and then pass it to the model defined as children. 
In this case, the data is passed from scaler -> svc."),(0,l.kt)("p",null,"Now let's create the above specifications in a yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"Create an API through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"If properly performed, it will be outputted as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"Check to see if it has been generated normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"If it is created normally, a similar pod will be created."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},1092:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},536:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file diff --git a/en/assets/js/4252e969.98aeac05.js b/en/assets/js/4252e969.9b059c64.js similarity index 99% rename from en/assets/js/4252e969.98aeac05.js rename to en/assets/js/4252e969.9b059c64.js index 5ab35fe3..30436e42 100644 --- a/en/assets/js/4252e969.98aeac05.js +++ b/en/assets/js/4252e969.9b059c64.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[605],{3905:(e,t,l)=>{l.d(t,{Zo:()=>c,kt:()=>f});var n=l(7294);function a(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function r(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(a[l]=e[l]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(a[l]=e[l])}return a}var i=n.createContext({}),p=function(e){var t=n.useContext(i),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var l=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),m=p(l),d=a,f=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return l?n.createElement(f,o(o({ref:t},c),{},{components:l})):n.createElement(f,o({ref:t},c))}));function f(e,t){var l=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=l.length,o=new Array(r);o[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[m]="string"==typeof e?e:a,o[1]=s;for(var p=2;p{l.r(t),l.d(t,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=l(7462),a=(l(7294),l(3905));const r={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"version-1.0/setup-components/install-components-mlflow",title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/en/docs/1.0/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/en/docs/1.0/setup-components/install-components-kf"},next:{title:"3. 
Seldon-Core",permalink:"/en/docs/1.0/setup-components/install-components-seldon"}},i={},p=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"Install PostgreSQL DB",id:"install-postgresql-db",level:3},{value:"Setup Minio",id:"setup-minio",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3}],c={toc:p},m="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,a.kt)("p",null,"MLflow is a popular open-source ML experiment management tool. In addition to ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"experiment management"),", MLflow provides functionalities for ML ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"model packaging"),", ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"deployment management"),", and ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"model storage"),"."),(0,a.kt)("p",null,"In ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will be using MLflow for experiment management purposes.",(0,a.kt)("br",{parentName:"p"}),"\n","o store the data managed by MLflow and provide a user interface, we will deploy the MLflow Tracking Server on the Kubernetes cluster."),(0,a.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"install-postgresql-db"},"Install PostgreSQL DB"),(0,a.kt)("p",null,"MLflow Tracking Server deploys a PostgreSQL DB for use as a Backend Store to a Kubernetes cluster."),(0,a.kt)("p",null,"First, create a namespace called ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,a.kt)("p",null,"If the following message is output, it means that it has been generated normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,a.kt)("p",null,"Create a Postgresql DB in the ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,a.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,a.kt)("p",null,"Wait until one postgresql related pod is running in the mlflow-system namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,a.kt)("p",null,"If it 
is output similar to the following, it has executed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,a.kt)("h3",{id:"setup-minio"},"Setup Minio"),(0,a.kt)("p",null,"We will utilize the Minio that was installed in the previous Kubeflow installation step.\nHowever, in order to separate it for kubeflow and mlflow purposes, we will create a mlflow-specific bucket.",(0,a.kt)("br",{parentName:"p"}),"\n","First, port-forward the minio-service to access Minio and create the bucket."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000")," to display the following screen."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"minio-install",src:l(1403).Z,width:"2906",height:"1946"})),(0,a.kt)("p",null,"Enter the following credentials to log in: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Username: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio")),(0,a.kt)("li",{parentName:"ul"},"Password: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,a.kt)("p",null,"Click the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"+"))," button on the right side bottom, then click ",(0,a.kt)("inlineCode",{parentName:"p"},"Create Bucket"),". "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create-bucket",src:l(7745).Z,width:"2902",height:"1950"})),(0,a.kt)("p",null,"Enter ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"Bucket Name")," to create the bucket."),(0,a.kt)("p",null,"If successfully created, you will see a bucket named ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," on the left.\n",(0,a.kt)("img",{alt:"mlflow-bucket",src:l(5455).Z,width:"2902",height:"1950"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,a.kt)("p",null,"If the following message is displayed, it means it has been added successfully."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,a.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,a.kt)("p",null,"If the following message is displayed, it means that the update has been successfully completed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,a.kt)("h3",{id:"helm-install"},"Helm Install"),(0,a.kt)("p",null,"Install mlflow-server Helm Chart version 0.2.0."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"The above Helm chart installs MLflow with the connection information for its backend store and artifacts store set to the default minio created during the Kubeflow installation process and the postgresql information created from the ",(0,a.kt)("a",{parentName:"li",href:"#postgresql-db-installation"},"PostgreSQL DB installation")," above.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use a separate DB or object storage, please refer to the ",(0,a.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo")," and set the values separately during helm install.")))),(0,a.kt)("p",null,"The following message should be displayed:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,a.kt)("p",null,"Check to see if it was installed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,a.kt)("p",null,"Wait until one mlflow-server related pod is running in the mlflow-system namespace.",(0,a.kt)("br",{parentName:"p"}),"\n","If it is output similar to the following, then it has been successfully executed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,a.kt)("h3",{id:"check-installation"},"Check installation"),(0,a.kt)("p",null,"Let's now check if we can successfully connect to the MLflow Server."),(0,a.kt)("p",null,"First, we will perform port forwarding in order to connect from the client node."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000")," and the following screen will be output."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlflow-install",src:l(8319).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},7745:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},1403:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},5455:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},8319:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[605],{3905:(e,t,l)=>{l.d(t,{Zo:()=>c,kt:()=>f});var n=l(7294);function a(e,t,l){return t in e?Object.defineProperty(e,t,{value:l,enumerable:!0,configurable:!0,writable:!0}):e[t]=l,e}function r(e,t){var l=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),l.push.apply(l,n)}return l}function o(e){for(var t=1;t=0||(a[l]=e[l]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,l)&&(a[l]=e[l])}return a}var i=n.createContext({}),p=function(e){var t=n.useContext(i),l=t;return e&&(l="function"==typeof e?e(t):o(o({},t),e)),l},c=function(e){var t=p(e.components);return n.createElement(i.Provider,{value:t},e.children)},m="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var l=e.components,a=e.mdxType,r=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),m=p(l),d=a,f=m["".concat(i,".").concat(d)]||m[d]||u[d]||r;return l?n.createElement(f,o(o({ref:t},c),{},{components:l})):n.createElement(f,o({ref:t},c))}));function f(e,t){var l=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=l.length,o=new Array(r);o[0]=d;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[m]="string"==typeof e?e:a,o[1]=s;for(var p=2;p{l.r(t),l.d(t,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>r,metadata:()=>s,toc:()=>p});var n=l(7462),a=(l(7294),l(3905));const r={title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,s={unversionedId:"setup-components/install-components-mlflow",id:"version-1.0/setup-components/install-components-mlflow",title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-mlflow.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-mlflow",permalink:"/en/docs/1.0/setup-components/install-components-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. MLflow Tracking Server",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - MLflow",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow",permalink:"/en/docs/1.0/setup-components/install-components-kf"},next:{title:"3. 
Seldon-Core",permalink:"/en/docs/1.0/setup-components/install-components-seldon"}},i={},p=[{value:"Install MLflow Tracking Server",id:"install-mlflow-tracking-server",level:2},{value:"Before Install MLflow Tracking Server",id:"before-install-mlflow-tracking-server",level:2},{value:"Install PostgreSQL DB",id:"install-postgresql-db",level:3},{value:"Setup Minio",id:"setup-minio",level:3},{value:"Let's Install MLflow Tracking Server",id:"lets-install-mlflow-tracking-server",level:2},{value:"Add Helm Repository",id:"add-helm-repository",level:3},{value:"Update Helm Repository",id:"update-helm-repository",level:3},{value:"Helm Install",id:"helm-install",level:3},{value:"Check installation",id:"check-installation",level:3}],c={toc:p},m="wrapper";function u(e){let{components:t,...r}=e;return(0,a.kt)(m,(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"install-mlflow-tracking-server"},"Install MLflow Tracking Server"),(0,a.kt)("p",null,"MLflow is a popular open-source ML experiment management tool. In addition to ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/tracking.html#tracking"},"experiment management"),", MLflow provides functionalities for ML ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/projects.html#projects"},"model packaging"),", ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/models.html#models"},"deployment management"),", and ",(0,a.kt)("a",{parentName:"p",href:"https://mlflow.org/docs/latest/model-registry.html#registry"},"model storage"),"."),(0,a.kt)("p",null,"In ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will be using MLflow for experiment management purposes.",(0,a.kt)("br",{parentName:"p"}),"\n","o store the data managed by MLflow and provide a user interface, we will deploy the MLflow Tracking Server on the Kubernetes cluster."),(0,a.kt)("h2",{id:"before-install-mlflow-tracking-server"},"Before Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"install-postgresql-db"},"Install PostgreSQL DB"),(0,a.kt)("p",null,"MLflow Tracking Server deploys a PostgreSQL DB for use as a Backend Store to a Kubernetes cluster."),(0,a.kt)("p",null,"First, create a namespace called ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create ns mlflow-system\n")),(0,a.kt)("p",null,"If the following message is output, it means that it has been generated normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/mlflow-system created\n")),(0,a.kt)("p",null,"Create a Postgresql DB in the ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow-system")," namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n mlflow-system apply -f https://raw.githubusercontent.com/mlops-for-all/helm-charts/b94b5fe4133f769c04b25068b98ccfa7a505aa60/mlflow/manifests/postgres.yaml \n")),(0,a.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"service/postgresql-mlflow-service created\ndeployment.apps/postgresql-mlflow created\npersistentvolumeclaim/postgresql-mlflow-pvc created\n")),(0,a.kt)("p",null,"Wait until one postgresql related pod is running in the mlflow-system namespace."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep postgresql\n")),(0,a.kt)("p",null,"If it 
is output similar to the following, it has executed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"postgresql-mlflow-7b9bc8c79f-srkh7 1/1 Running 0 38s\n")),(0,a.kt)("h3",{id:"setup-minio"},"Setup Minio"),(0,a.kt)("p",null,"We will utilize the Minio that was installed in the previous Kubeflow installation step.\nHowever, in order to separate it for kubeflow and mlflow purposes, we will create a mlflow-specific bucket.",(0,a.kt)("br",{parentName:"p"}),"\n","First, port-forward the minio-service to access Minio and create the bucket."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/minio-service -n kubeflow 9000:9000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:9000"},"localhost:9000")," to display the following screen."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"minio-install",src:l(1403).Z,width:"2906",height:"1946"})),(0,a.kt)("p",null,"Enter the following credentials to log in: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Username: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio")),(0,a.kt)("li",{parentName:"ul"},"Password: ",(0,a.kt)("inlineCode",{parentName:"li"},"minio123"))),(0,a.kt)("p",null,"Click the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"+"))," button on the right side bottom, then click ",(0,a.kt)("inlineCode",{parentName:"p"},"Create Bucket"),". "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create-bucket",src:l(7745).Z,width:"2902",height:"1950"})),(0,a.kt)("p",null,"Enter ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," in ",(0,a.kt)("inlineCode",{parentName:"p"},"Bucket Name")," to create the bucket."),(0,a.kt)("p",null,"If successfully created, you will see a bucket named ",(0,a.kt)("inlineCode",{parentName:"p"},"mlflow")," on the left.\n",(0,a.kt)("img",{alt:"mlflow-bucket",src:l(5455).Z,width:"2902",height:"1950"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"lets-install-mlflow-tracking-server"},"Let's Install MLflow Tracking Server"),(0,a.kt)("h3",{id:"add-helm-repository"},"Add Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts\n")),(0,a.kt)("p",null,"If the following message is displayed, it means it has been added successfully."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'"mlops-for-all" has been added to your repositories\n')),(0,a.kt)("h3",{id:"update-helm-repository"},"Update Helm Repository"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,a.kt)("p",null,"If the following message is displayed, it means that the update has been successfully completed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "mlops-for-all" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,a.kt)("h3",{id:"helm-install"},"Helm Install"),(0,a.kt)("p",null,"Install mlflow-server Helm Chart version 0.2.0."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"helm install mlflow-server mlops-for-all/mlflow-server \\\n --namespace mlflow-system \\\n --version 0.2.0\n")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"The above Helm chart installs MLflow with the connection information for its backend store and artifacts store set to the default minio created during the Kubeflow installation process and the postgresql information created from the ",(0,a.kt)("a",{parentName:"li",href:"#postgresql-db-installation"},"PostgreSQL DB installation")," above.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use a separate DB or object storage, please refer to the ",(0,a.kt)("a",{parentName:"li",href:"https://github.com/mlops-for-all/helm-charts/tree/main/mlflow/chart"},"Helm Chart Repo")," and set the values separately during helm install.")))),(0,a.kt)("p",null,"The following message should be displayed:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME: mlflow-server\nLAST DEPLOYED: Sat Dec 18 22:02:13 2021\nNAMESPACE: mlflow-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,a.kt)("p",null,"Check to see if it was installed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n mlflow-system | grep mlflow-server\n")),(0,a.kt)("p",null,"Wait until one mlflow-server related pod is running in the mlflow-system namespace.",(0,a.kt)("br",{parentName:"p"}),"\n","If it is output similar to the following, then it has been successfully executed."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow-server-ffd66d858-6hm62 1/1 Running 0 74s\n")),(0,a.kt)("h3",{id:"check-installation"},"Check installation"),(0,a.kt)("p",null,"Let's now check if we can successfully connect to the MLflow Server."),(0,a.kt)("p",null,"First, we will perform port forwarding in order to connect from the client node."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,a.kt)("p",null,"Open a web browser and connect to ",(0,a.kt)("a",{parentName:"p",href:"http://localhost:5000"},"localhost:5000")," and the following screen will be output."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlflow-install",src:l(8319).Z,width:"2882",height:"1464"})))}u.isMDXComponent=!0},7745:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/create-bucket-58bd2a673744c0144ffb14a2aeeef821.png"},1403:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/minio-install-587ecd302eecc621dbb568c124c80ccf.png"},5455:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-bucket-63b427bd7a5147b8bae2ac69c57facff.png"},8319:(e,t,l)=>{l.d(t,{Z:()=>n});const n=l.p+"assets/images/mlflow-install-b3920befde2af7fdbf3677ab12036440.png"}}]); \ No newline at end of file diff --git a/en/assets/js/45ae3dfd.4b89cb0e.js b/en/assets/js/45ae3dfd.55c467df.js similarity index 98% rename from en/assets/js/45ae3dfd.4b89cb0e.js rename to en/assets/js/45ae3dfd.55c467df.js index 54d2ff25..562d6abd 100644 --- a/en/assets/js/45ae3dfd.4b89cb0e.js +++ b/en/assets/js/45ae3dfd.55c467df.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2300],{3905:(t,e,a)=>{a.d(e,{Zo:()=>s,kt:()=>g});var 
n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function o(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):o(o({},e),t)),a},s=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},d="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},c=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,s=p(t,["components","mdxType","originalType","parentName"]),d=m(a),c=r,g=d["".concat(i,".").concat(c)]||d[c]||u[c]||l;return a?n.createElement(g,o(o({ref:e},s),{},{components:a})):n.createElement(g,o({ref:e},s))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,o=new Array(l);o[0]=c;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[d]="string"==typeof t?t:r,o[1]=p;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"Further Readings",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},o=void 0,p={unversionedId:"further-readings/info",id:"further-readings/info",title:"Further Readings",description:"MLOps Component",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/en/docs/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/further-readings/info.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",frontMatter:{title:"Further Readings",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],s={toc:m},d="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(d,(0,n.Z)({},s,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,"From the components covered in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"MLOps Concepts"),", the following diagram illustrates them. 
"),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(7941).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"The technology stacks covered in ",(0,r.kt)("em",{parentName:"p"},"Everyone's MLOps")," are as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(3017).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"| | Storage | ",(0,r.kt)("a",{parentName:"p",href:"https://min.io/"},"Minio")," |\n| | Data Processing | ",(0,r.kt)("a",{parentName:"p",href:"https://spark.apache.org/"},"Apache Spark")," |\n| | Data Visualization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.tableau.com/"},"Tableau")," |\n| Workflow Mgmt. | Orchestration | ",(0,r.kt)("a",{parentName:"p",href:"https://airflow.apache.org/"},"Airflow")," |\n| | Scheduling | ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/"},"Kubernetes")," |\n| Security & Compliance | Authentication & Authorization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openldap.org/"},"Ldap")," |\n| | Data Encryption & Tokenization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.vaultproject.io/"},"Vault")," |\n| | Governance & Auditing | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openpolicyagent.org/"},"Open Policy Agent")," |"),(0,r.kt)("p",null,"As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need it, it might be a good idea to refer to the following open source projects first."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(3314).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"For details:"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Soruce"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. 
& Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", 
",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},7941:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},3017:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},3314:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2300],{3905:(t,e,a)=>{a.d(e,{Zo:()=>s,kt:()=>g});var n=a(7294);function r(t,e,a){return e in t?Object.defineProperty(t,e,{value:a,enumerable:!0,configurable:!0,writable:!0}):t[e]=a,t}function l(t,e){var a=Object.keys(t);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(t);e&&(n=n.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),a.push.apply(a,n)}return a}function o(t){for(var e=1;e=0||(r[a]=t[a]);return r}(t,e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(t);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(t,a)&&(r[a]=t[a])}return r}var i=n.createContext({}),m=function(t){var e=n.useContext(i),a=e;return t&&(a="function"==typeof t?t(e):o(o({},e),t)),a},s=function(t){var e=m(t.components);return n.createElement(i.Provider,{value:e},t.children)},d="mdxType",u={inlineCode:"code",wrapper:function(t){var e=t.children;return n.createElement(n.Fragment,{},e)}},c=n.forwardRef((function(t,e){var a=t.components,r=t.mdxType,l=t.originalType,i=t.parentName,s=p(t,["components","mdxType","originalType","parentName"]),d=m(a),c=r,g=d["".concat(i,".").concat(c)]||d[c]||u[c]||l;return a?n.createElement(g,o(o({ref:e},s),{},{components:a})):n.createElement(g,o({ref:e},s))}));function g(t,e){var a=arguments,r=e&&e.mdxType;if("string"==typeof t||r){var l=a.length,o=new Array(l);o[0]=c;var p={};for(var i in e)hasOwnProperty.call(e,i)&&(p[i]=e[i]);p.originalType=t,p[d]="string"==typeof t?t:r,o[1]=p;for(var m=2;m{a.r(e),a.d(e,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>l,metadata:()=>p,toc:()=>m});var n=a(7462),r=(a(7294),a(3905));const l={title:"Further Readings",date:new Date("2021-12-21T00:00:00.000Z"),lastmod:new Date("2021-12-21T00:00:00.000Z")},o=void 0,p={unversionedId:"further-readings/info",id:"further-readings/info",title:"Further Readings",description:"MLOps Component",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/further-readings/info.md",sourceDirName:"further-readings",slug:"/further-readings/info",permalink:"/en/docs/further-readings/info",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/further-readings/info.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",frontMatter:{title:"Further Readings",date:"2021-12-21T00:00:00.000Z",lastmod:"2021-12-21T00:00:00.000Z"},sidebar:"tutorialSidebar",previous:{title:"2. 
Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/appendix/metallb"}},i={},m=[{value:"MLOps Component",id:"mlops-component",level:2}],s={toc:m},d="wrapper";function u(t){let{components:e,...l}=t;return(0,r.kt)(d,(0,n.Z)({},s,l,{components:e,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops-component"},"MLOps Component"),(0,r.kt)("p",null,"The following diagram illustrates the components covered in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"MLOps Concepts"),". "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-0.png",src:a(7941).Z,width:"1600",height:"588"})),(0,r.kt)("p",null,"The technology stacks covered in ",(0,r.kt)("em",{parentName:"p"},"Everyone's MLOps")," are as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-1.png",src:a(3017).Z,width:"1600",height:"594"})),(0,r.kt)("p",null,"| | Storage | ",(0,r.kt)("a",{parentName:"p",href:"https://min.io/"},"Minio")," |\n| | Data Processing | ",(0,r.kt)("a",{parentName:"p",href:"https://spark.apache.org/"},"Apache Spark")," |\n| | Data Visualization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.tableau.com/"},"Tableau")," |\n| Workflow Mgmt. | Orchestration | ",(0,r.kt)("a",{parentName:"p",href:"https://airflow.apache.org/"},"Airflow")," |\n| | Scheduling | ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/"},"Kubernetes")," |\n| Security & Compliance | Authentication & Authorization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openldap.org/"},"Ldap")," |\n| | Data Encryption & Tokenization | ",(0,r.kt)("a",{parentName:"p",href:"https://www.vaultproject.io/"},"Vault")," |\n| | Governance & Auditing | ",(0,r.kt)("a",{parentName:"p",href:"https://www.openpolicyagent.org/"},"Open Policy Agent")," |"),(0,r.kt)("p",null,"As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need them, it might be a good idea to refer to the following open source projects first."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"open-stacks-2.png",src:a(3314).Z,width:"1616",height:"588"})),(0,r.kt)("p",null,"For details:"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"Mgmt."),(0,r.kt)("th",{parentName:"tr",align:null},"Component"),(0,r.kt)("th",{parentName:"tr",align:null},"Open Source"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Data Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Collection"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kafka.apache.org/"},"Kafka"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Validation"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://beam.apache.org/"},"Beam"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Feature Store"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://flink.apache.org/"},"Flink"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"ML Model Dev. 
& Experiment"),(0,r.kt)("td",{parentName:"tr",align:null},"Modeling"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://jupyter.org/"},"Jupyter"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Analysis & Experiment Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://mlflow.org/"},"MLflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"HPO Tuning & AutoML"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/kubeflow/katib"},"Katib"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Deploy Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Serving Framework"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.seldon.io/projects/seldon-core/en/latest/index.html"},"Seldon Core"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"A/B Test"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://iter8.tools/"},"Iter8"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Monitoring"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://grafana.com/oss/grafana/"},"Grafana"),", ",(0,r.kt)("a",{parentName:"td",href:"https://prometheus.io/"},"Prometheus"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Process Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"pipeline"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.kubeflow.org/"},"Kubeflow"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"CI/CD"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://docs.github.com/en/actions"},"Github Action"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Continuous Training"),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://argoproj.github.io/events/"},"Argo Events"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"Platform Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},"Configuration Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://www.consul.io/"},"Consul"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Code Version Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://github.com/"},"Github"),", ",(0,r.kt)("a",{parentName:"td",href:"https://min.io/"},"Minio"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Logging"),(0,r.kt)("td",{parentName:"tr",align:null},"(EFK) ",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/elasticsearch/"},"Elastic Search"),", ",(0,r.kt)("a",{parentName:"td",href:"https://www.fluentd.org/"},"Fluentd"),", 
",(0,r.kt)("a",{parentName:"td",href:"https://www.elastic.co/kr/kibana/"},"Kibana"))),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null}),(0,r.kt)("td",{parentName:"tr",align:null},"Resource Mgmt."),(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("a",{parentName:"td",href:"https://kubernetes.io/"},"Kubernetes"))))))}u.isMDXComponent=!0},7941:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-0-75a5736738cbd950e04122e6252dc2c1.png"},3017:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-1-1ab94bd3c5f055c056a4ffc84f4f03f4.png"},3314:(t,e,a)=>{a.d(e,{Z:()=>n});const n=a.p+"assets/images/open-stacks-2-32f97815a2c7d02a32f080a996712ca6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/4c6b0ea3.3235ee4b.js b/en/assets/js/4c6b0ea3.a893b667.js similarity index 99% rename from en/assets/js/4c6b0ea3.3235ee4b.js rename to en/assets/js/4c6b0ea3.a893b667.js index ee731d8f..2badd761 100644 --- a/en/assets/js/4c6b0ea3.3235ee4b.js +++ b/en/assets/js/4c6b0ea3.a893b667.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2984],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>m});var i=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function s(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,u=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=o(t),b=a,m=p["".concat(u,".").concat(b)]||p[b]||k[b]||r;return t?i.createElement(m,s(s({ref:n},d),{},{components:t})):i.createElement(m,s({ref:n},d))}));function m(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,s=new Array(r);s[0]=b;var l={};for(var u in n)hasOwnProperty.call(n,u)&&(l[u]=n[u]);l.originalType=e,l[p]="string"==typeof e?e:a,s[1]=l;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>o});var i=t(7462),a=(t(7294),t(3905));const r={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. 
Minikube",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:3},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2}],d={toc:o},p="wrapper";function k(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,i.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,a.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,a.kt)("p",null,"Install the v1.24.0 version of the Minikube binary to use Minikube."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,a.kt)("p",null,"Check if it is installed properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,a.kt)("p",null,"If this message appears, it means the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,a.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,a.kt)("p",null,"Now let's build the Kubernetes cluster using Minikube.\nTo facilitate the smooth use of GPUs and communication between cluster and client, Minikube is run using the ",(0,a.kt)("inlineCode",{parentName:"p"},"driver=none")," option. Please note that this option must be run as root user. 
"),(0,a.kt)("p",null,"Switch to root user."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,a.kt)("p",null,"Run ",(0,a.kt)("inlineCode",{parentName:"p"},"minikube start")," to build the Kubernetes cluster for Kubeflow's smooth operation, specifying the Kubernetes version as v1.21.7 and adding ",(0,a.kt)("inlineCode",{parentName:"p"},"--extra-config"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,a.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,a.kt)("p",null,"When installing Minikube, there are default addons that are installed. We will disable any addons that we do not intend to use."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,a.kt)("p",null,"Confirm that all addons are disabled."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,a.kt)("p",null,"If the following message appears, it means that the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | disabled | kubernetes 
|\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,a.kt)("h3",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,a.kt)("p",null,"Now, let's install the necessary tools for smooth usage of Kubernetes on the ",(0,a.kt)("strong",{parentName:"p"},"client")," machine. If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are not separated, please note that you need to perform all the operations as the root user."),(0,a.kt)("p",null,"If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are separated, first, we need to retrieve the Kubernetes administrator credentials from the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," to the ",(0,a.kt)("strong",{parentName:"p"},"client"),"."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Check the config on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),":"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Cluster node\nminikube kubectl -- config view --flatten\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"The following information will be displayed:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,a.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,a.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"")))),(0,a.kt)("ol",{start:3},(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Create the ",(0,a.kt)("inlineCode",{parentName:"p"},".kube")," folder on the ",(0,a.kt)("strong",{parentName:"p"},"client")," node:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Client node\nmkdir -p /home/$USER/.kube\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Paste the information obtained from Step 2 into the file and save it:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,a.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. 
Install Kubernetes Default Modules"),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,a.kt)("p",null,"Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"If this message appears, it means that the installation has completed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2984],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>m});var i=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);n&&(i=i.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,i)}return t}function s(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var u=i.createContext({}),o=function(e){var n=i.useContext(u),t=n;return e&&(t="function"==typeof e?e(n):s(s({},n),e)),t},d=function(e){var n=o(e.components);return i.createElement(u.Provider,{value:n},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var n=e.children;return i.createElement(i.Fragment,{},n)}},b=i.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,u=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=o(t),b=a,m=p["".concat(u,".").concat(b)]||p[b]||k[b]||r;return t?i.createElement(m,s(s({ref:n},d),{},{components:t})):i.createElement(m,s({ref:n},d))}));function m(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,s=new Array(r);s[0]=b;var l={};for(var u in n)hasOwnProperty.call(n,u)&&(l[u]=n[u]);l.originalType=e,l[p]="string"==typeof e?e:a,s[1]=l;for(var o=2;o{t.r(n),t.d(n,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>o});var i=t(7462),a=(t(7294),t(3905));const r={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. 
Minikube",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:3},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2}],d={toc:o},p="wrapper";function k(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,i.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,a.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,a.kt)("p",null,"Install the v1.24.0 version of the Minikube binary to use Minikube."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,a.kt)("p",null,"Check if it is installed properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,a.kt)("p",null,"If this message appears, it means the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,a.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,a.kt)("p",null,"Now let's build the Kubernetes cluster using Minikube.\nTo facilitate the smooth use of GPUs and communication between cluster and client, Minikube is run using the ",(0,a.kt)("inlineCode",{parentName:"p"},"driver=none")," option. Please note that this option must be run as root user. 
"),(0,a.kt)("p",null,"Switch to root user."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,a.kt)("p",null,"Run ",(0,a.kt)("inlineCode",{parentName:"p"},"minikube start")," to build the Kubernetes cluster for Kubeflow's smooth operation, specifying the Kubernetes version as v1.21.7 and adding ",(0,a.kt)("inlineCode",{parentName:"p"},"--extra-config"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,a.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,a.kt)("p",null,"When installing Minikube, there are default addons that are installed. We will disable any addons that we do not intend to use."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,a.kt)("p",null,"Confirm that all addons are disabled."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,a.kt)("p",null,"If the following message appears, it means that the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | disabled | kubernetes 
|\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,a.kt)("h3",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,a.kt)("p",null,"Now, let's install the necessary tools for smooth usage of Kubernetes on the ",(0,a.kt)("strong",{parentName:"p"},"client")," machine. If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are not separated, please note that you need to perform all the operations as the root user."),(0,a.kt)("p",null,"If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are separated, first, we need to retrieve the Kubernetes administrator credentials from the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," to the ",(0,a.kt)("strong",{parentName:"p"},"client"),"."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Check the config on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),":"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Cluster node\nminikube kubectl -- config view --flatten\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"The following information will be displayed:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,a.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,a.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"")))),(0,a.kt)("ol",{start:3},(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Create the ",(0,a.kt)("inlineCode",{parentName:"p"},".kube")," folder on the ",(0,a.kt)("strong",{parentName:"p"},"client")," node:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Client node\nmkdir -p /home/$USER/.kube\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Paste the information obtained from Step 2 into the file and save it:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,a.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. 
Install Kubernetes Default Modules"),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,a.kt)("p",null,"Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"If this message appears, it means that the installation has completed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/4f70ae63.a90f78eb.js b/en/assets/js/4f70ae63.a875d94f.js similarity index 99% rename from en/assets/js/4f70ae63.a90f78eb.js rename to en/assets/js/4f70ae63.a875d94f.js index c262732e..d9652705 100644 --- a/en/assets/js/4f70ae63.a90f78eb.js +++ b/en/assets/js/4f70ae63.a875d94f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7904],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),s=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=s(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,l=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),d=s(n),m=o,h=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(h,i(i({ref:t},u),{},{components:n})):a.createElement(h,i({ref:t},u))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var p={};for(var l in t)hasOwnProperty.call(t,l)&&(p[l]=t[l]);p.originalType=e,p[d]="string"==typeof e?e:o,i[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>p,toc:()=>s});var a=n(7462),o=(n(7294),n(3905));const r={title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-component",id:"kubeflow/advanced-component",title:"8. 
Component - InputPath/OutputPath",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/en/docs/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/en/docs/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/en/docs/kubeflow/advanced-environment"}},l={},s=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule for using InputPath/OutputPath",id:"rule-for-using-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],u={toc:s},d="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,o.kt)("p",null,"On this page, we will write the code example from ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," as a component."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"Below is the component content used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"With the necessary Configs for the Component Wrapper, it will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"In the ","[Basic Usage Component]","](../kubeflow/basic-component), we explained that you should provide type hints for input and output when describing. 
But what about complex objects such as dataframes, models, that cannot be used in json?"),(0,o.kt)("p",null,"When passing values between functions in Python, objects can be returned and their value will be stored in the host's memory, so the same object can be used in the next function. However, in Kubeflow, components are running independently on each container, that is, they are not sharing the same memory, so you cannot pass objects in the same way as in a normal Python function. The only information that can be passed between components is in ",(0,o.kt)("inlineCode",{parentName:"p"},"json")," format. Therefore, objects of types that cannot be converted into json format such as Model or DataFrame must be passed in some other way."),(0,o.kt)("p",null,"Kubeflow solves this by storing the data in a file instead of memory, and then using the file to pass information. Since the path of the stored file is a string, it can be passed between components. However, in Kubeflow, the user does not know the path of the file before the execution. For this, Kubeflow provides a magic related to the input and output paths, ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath"),"."),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," literally means the input path, and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," literally means the output path."),(0,o.kt)("p",null,"For example, in a component that generates and returns data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()")," is created as an argument. And in a component that receives data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()")," is created as an argument."),(0,o.kt)("p",null,"Once these are created, when connecting them in a pipeline, Kubeflow automatically generates and inputs the necessary paths. Therefore, users no longer need to worry about the paths and only need to consider the relationships between components."),(0,o.kt)("p",null,"Based on this information, when rewriting the component wrapper, it would look like the following."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"InputPath or OutputPath can accept a string. 
This string is the format of the file to be input or output.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it does not necessarily mean that the file has to be stored in this format.",(0,o.kt)("br",{parentName:"p"}),"\n","It just serves as a helper for type checking when compiling the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","If the file format is not fixed, then no input is needed (it serves the role of something like ",(0,o.kt)("inlineCode",{parentName:"p"},"Any")," in type hints)."),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"Convert the written component into a format that can be used in Kubeflow."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"rule-for-using-inputpathoutputpath"},"Rule for using InputPath/OutputPath"),(0,o.kt)("p",null,"There are rules to follow when using InputPath or OutputPath arguments in pipeline."),(0,o.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,o.kt)("p",null,"To execute the previously written component, a component that generates data is created since data is required."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,o.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,o.kt)("p",null,"Now let's write the pipeline."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,o.kt)("p",null,"Have you noticed something strange?",(0,o.kt)("br",{parentName:"p"}),"\n","All the ",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffixes have disappeared from the arguments received in the input and output.",(0,o.kt)("br",{parentName:"p"}),"\n","We can see that instead of accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]'),", we are accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]'),".",(0,o.kt)("br",{parentName:"p"}),"\n","This happens because Kubeflow has a rule that paths created with ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," can be accessed without the 
",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffix when accessed from the pipeline."),(0,o.kt)("p",null,"However, if you upload the pipeline just written, it will not run.",(0,o.kt)("br",{parentName:"p"}),"\n","The reason is explained on the next page."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7904],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),s=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=s(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,l=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),d=s(n),m=o,h=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(h,i(i({ref:t},u),{},{components:n})):a.createElement(h,i({ref:t},u))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var p={};for(var l in t)hasOwnProperty.call(t,l)&&(p[l]=t[l]);p.originalType=e,p[d]="string"==typeof e?e:o,i[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>p,toc:()=>s});var a=n(7462),o=(n(7294),n(3905));const r={title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-component",id:"kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/en/docs/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/en/docs/kubeflow/basic-run"},next:{title:"9. 
Component - Environment",permalink:"/en/docs/kubeflow/advanced-environment"}},l={},s=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule for using InputPath/OutputPath",id:"rule-for-using-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],u={toc:s},d="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,o.kt)("p",null,"On this page, we will write the code example from ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," as a component."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"Below is the component content used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"With the necessary Configs for the Component Wrapper, it will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"In the ","[Basic Usage Component]","](../kubeflow/basic-component), we explained that you should provide type hints for input and output when describing. But what about complex objects such as dataframes, models, that cannot be used in json?"),(0,o.kt)("p",null,"When passing values between functions in Python, objects can be returned and their value will be stored in the host's memory, so the same object can be used in the next function. However, in Kubeflow, components are running independently on each container, that is, they are not sharing the same memory, so you cannot pass objects in the same way as in a normal Python function. The only information that can be passed between components is in ",(0,o.kt)("inlineCode",{parentName:"p"},"json")," format. Therefore, objects of types that cannot be converted into json format such as Model or DataFrame must be passed in some other way."),(0,o.kt)("p",null,"Kubeflow solves this by storing the data in a file instead of memory, and then using the file to pass information. 
Since the path of the stored file is a string, it can be passed between components. However, in Kubeflow, the user does not know the path of the file before the execution. For this, Kubeflow provides a magic related to the input and output paths, ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath"),"."),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," literally means the input path, and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," literally means the output path."),(0,o.kt)("p",null,"For example, in a component that generates and returns data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()")," is created as an argument. And in a component that receives data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()")," is created as an argument."),(0,o.kt)("p",null,"Once these are created, when connecting them in a pipeline, Kubeflow automatically generates and inputs the necessary paths. Therefore, users no longer need to worry about the paths and only need to consider the relationships between components."),(0,o.kt)("p",null,"Based on this information, when rewriting the component wrapper, it would look like the following."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"InputPath or OutputPath can accept a string. 
This string is the format of the file to be input or output.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it does not necessarily mean that the file has to be stored in this format.",(0,o.kt)("br",{parentName:"p"}),"\n","It just serves as a helper for type checking when compiling the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","If the file format is not fixed, then no input is needed (it serves the role of something like ",(0,o.kt)("inlineCode",{parentName:"p"},"Any")," in type hints)."),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"Convert the written component into a format that can be used in Kubeflow."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"rule-for-using-inputpathoutputpath"},"Rule for using InputPath/OutputPath"),(0,o.kt)("p",null,"There are rules to follow when using InputPath or OutputPath arguments in pipeline."),(0,o.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,o.kt)("p",null,"To execute the previously written component, a component that generates data is created since data is required."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,o.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,o.kt)("p",null,"Now let's write the pipeline."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,o.kt)("p",null,"Have you noticed something strange?",(0,o.kt)("br",{parentName:"p"}),"\n","All the ",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffixes have disappeared from the arguments received in the input and output.",(0,o.kt)("br",{parentName:"p"}),"\n","We can see that instead of accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]'),", we are accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]'),".",(0,o.kt)("br",{parentName:"p"}),"\n","This happens because Kubeflow has a rule that paths created with ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," can be accessed without the 
",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffix when accessed from the pipeline."),(0,o.kt)("p",null,"However, if you upload the pipeline just written, it will not run.",(0,o.kt)("br",{parentName:"p"}),"\n","The reason is explained on the next page."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/51a35976.2ca09bec.js b/en/assets/js/51a35976.70485e8c.js similarity index 98% rename from en/assets/js/51a35976.2ca09bec.js rename to en/assets/js/51a35976.70485e8c.js index d03a09c4..b36cc0c2 100644 --- a/en/assets/js/51a35976.2ca09bec.js +++ b/en/assets/js/51a35976.70485e8c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6490],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),p=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=p(e.components);return a.createElement(c.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),m=p(n),u=r,k=m["".concat(c,".").concat(u)]||m[u]||d[u]||o;return n?a.createElement(k,i(i({ref:t},s),{},{components:n})):a.createElement(k,i({ref:t},s))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=u;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[m]="string"==typeof e?e:r,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/images",id:"version-1.0/prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/en/docs/1.0/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/images.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/en/docs/1.0/prerequisites/docker/command"},next:{title:"[Practice] Docker 
Advanced",permalink:"/en/docs/1.0/prerequisites/docker/advanced"}},c={},p=[],s={toc:p},m="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(m,(0,a.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 \ubc29\ubc95"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},'docker commit -m "message" -a "author" ')),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit")," \uc744 \uc0ac\uc6a9\ud558\uba74, \uc218\ub3d9\uc73c\ub85c Dockerfile \uc744 \ub9cc\ub4e4\uc9c0 \uc54a\uace0\ub3c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"touch Dockerfile\n")))))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Move to the docker-practice folder.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Create an empty file called Dockerfile.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc774\ubbf8\uc9c0\uc5d0 \ud2b9\uc815 \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ubb34\uc5c7\uc785\ub2c8\uae4c?"))),(0,r.kt)("p",null,"Answer: ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN")),(0,r.kt)("p",null,"Translation: Let's look at the basic commands that can be used in Dockerfile one by one. FROM is a command that specifies which image to use as a base image for Dockerfile. When creating a Docker image, instead of creating the environment I intend from scratch, I can use a pre-made image such as ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", etc. as the base and install pytorch and add my source code."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,r.kt)("p",null,"The command to copy files or directories from the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path on the host (local) to the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,r.kt)("p",null,"ADD is similar to COPY but it has additional features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,r.kt)("p",null,"The command to run the specified command inside a Docker container.\nDocker images maintain the state in which the commands are executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,r.kt)("p",null,"CMD specifies a command that the Docker container will ",(0,r.kt)("strong",{parentName:"p"},"run when it starts"),". There is a similar command called ",(0,r.kt)("strong",{parentName:"p"},"ENTRYPOINT"),". The difference between them will be discussed ",(0,r.kt)("strong",{parentName:"p"},"later"),". Note that only one ",(0,r.kt)("strong",{parentName:"p"},"CMD")," can be run in one Docker image, which is different from ",(0,r.kt)("strong",{parentName:"p"},"RUN")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,r.kt)("p",null,"WORKDIR is a command that specifies which directory inside the container to perform future additional commands. If the directory does not exist, it will be created."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,r.kt)("p",null,"This is a command to set the value of environment variables that will be used continuously inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,r.kt)("p",null,"You can specify the port/protocol to be opened from the container. 
If ",(0,r.kt)("inlineCode",{parentName:"p"},"")," is not specified, TCP is set as the default."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,r.kt)("p",null,"Write a simple Dockerfile by using ",(0,r.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," or an editor like vscode and write the following:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,r.kt)("p",null,"Use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," command to create a Docker Image from a Dockerfile."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,r.kt)("p",null,"Run the following command from the path where the Dockerfile is located."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,r.kt)("p",null,'The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. Let\'s check if the image was built successfully.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,r.kt)("p",null,"If performed normally, it will output as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,r.kt)("p",null,"Let's now ",(0,r.kt)("strong",{parentName:"p"},"run")," a docker container with the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image that we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will result in the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,r.kt)("p",null,"Let's run a docker container and change the value of the ",(0,r.kt)("inlineCode",{parentName:"p"},"TEST")," env var at the time of running the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6490],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),p=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},s=function(e){var t=p(e.components);return a.createElement(c.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),m=p(n),u=r,k=m["".concat(c,".").concat(u)]||m[u]||d[u]||o;return n?a.createElement(k,i(i({ref:t},s),{},{components:n})):a.createElement(k,i({ref:t},s))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=u;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[m]="string"==typeof e?e:r,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/images",id:"version-1.0/prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/en/docs/1.0/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/images.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/en/docs/1.0/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/en/docs/1.0/prerequisites/docker/advanced"}},c={},p=[],s={toc:p},m="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(m,(0,a.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 \ubc29\ubc95"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},'docker commit -m "message" -a "author" ')),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit")," \uc744 \uc0ac\uc6a9\ud558\uba74, \uc218\ub3d9\uc73c\ub85c Dockerfile \uc744 \ub9cc\ub4e4\uc9c0 \uc54a\uace0\ub3c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"touch Dockerfile\n")))))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Move to the docker-practice 
folder.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Create an empty file called Dockerfile.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc774\ubbf8\uc9c0\uc5d0 \ud2b9\uc815 \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ubb34\uc5c7\uc785\ub2c8\uae4c?"))),(0,r.kt)("p",null,"Answer: ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN")),(0,r.kt)("p",null,"Translation: Let's look at the basic commands that can be used in Dockerfile one by one. FROM is a command that specifies which image to use as a base image for Dockerfile. When creating a Docker image, instead of creating the environment I intend from scratch, I can use a pre-made image such as ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", etc. as the base and install pytorch and add my source code."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,r.kt)("p",null,"The command to copy files or directories from the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path on the host (local) to the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... \n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,r.kt)("p",null,"ADD is similar to COPY but it has additional features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,r.kt)("p",null,"The command to run the specified command inside a Docker container.\nDocker images maintain the state in which the commands are executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,r.kt)("p",null,"CMD specifies a command that the Docker container will ",(0,r.kt)("strong",{parentName:"p"},"run when it starts"),". There is a similar command called ",(0,r.kt)("strong",{parentName:"p"},"ENTRYPOINT"),". The difference between them will be discussed ",(0,r.kt)("strong",{parentName:"p"},"later"),". Note that only one ",(0,r.kt)("strong",{parentName:"p"},"CMD")," can be run in one Docker image, which is different from ",(0,r.kt)("strong",{parentName:"p"},"RUN")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,r.kt)("p",null,"WORKDIR is a command that specifies which directory inside the container to perform future additional commands. 
If the directory does not exist, it will be created."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,r.kt)("p",null,"This is a command to set the value of environment variables that will be used continuously inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,r.kt)("p",null,"You can specify the port/protocol to be opened from the container. If ",(0,r.kt)("inlineCode",{parentName:"p"},"")," is not specified, TCP is set as the default."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,r.kt)("p",null,"Write a simple Dockerfile by using ",(0,r.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," or an editor like vscode and write the following:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,r.kt)("p",null,"Use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," command to create a Docker Image from a Dockerfile."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,r.kt)("p",null,"Run the following command from the path where the Dockerfile is located."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,r.kt)("p",null,'The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. 
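As a side note, a couple of optional build flags can also be handy (these are standard docker build options; the alternative Dockerfile path below is only an example):'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# build using a Dockerfile kept at a different path (example path)\ndocker build -t my-image:v1.0.0 -f docker/Dockerfile .\n\n# build again without reusing any cached layers\ndocker build --no-cache -t my-image:v1.0.0 .\n")),(0,r.kt)("p",null,'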
Let\'s check if the image was built successfully.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,r.kt)("p",null,"If performed normally, it will output as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,r.kt)("p",null,"Let's now ",(0,r.kt)("strong",{parentName:"p"},"run")," a docker container with the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image that we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will result in the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,r.kt)("p",null,"Let's run a docker container and change the value of the ",(0,r.kt)("inlineCode",{parentName:"p"},"TEST")," env var at the time of running the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/52a0bca6.ea84c72f.js b/en/assets/js/52a0bca6.d748758f.js similarity index 98% rename from en/assets/js/52a0bca6.ea84c72f.js rename to en/assets/js/52a0bca6.d748758f.js index 83981f87..75a36a3a 100644 --- a/en/assets/js/52a0bca6.ea84c72f.js +++ b/en/assets/js/52a0bca6.d748758f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1229],{3905:(e,t,o)=>{o.d(t,{Zo:()=>c,kt:()=>h});var n=o(7294);function r(e,t,o){return t in e?Object.defineProperty(e,t,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[t]=o,e}function a(e,t){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),o.push.apply(o,n)}return o}function s(e){for(var t=1;t=0||(r[o]=e[o]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(r[o]=e[o])}return r}var l=n.createContext({}),u=function(e){var t=n.useContext(l),o=t;return e&&(o="function"==typeof e?e(t):s(s({},t),e)),o},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var o=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=u(o),m=r,h=d["".concat(l,".").concat(m)]||d[m]||p[m]||a;return o?n.createElement(h,s(s({ref:t},c),{},{components:o})):n.createElement(h,s({ref:t},c))}));function h(e,t){var o=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=o.length,s=new Array(a);s[0]=m;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var u=2;u{o.r(t),o.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var 
n=o(7462),r=(o(7294),o(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/volumes",id:"kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/volumes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/en/docs/kubeflow-dashboard-guide/experiments"}},l={},u=[{value:"Volumes",id:"volumes",level:2},{value:"Creating a Volume",id:"creating-a-volume",level:2}],c={toc:u},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"volumes"},"Volumes"),(0,r.kt)("p",null,"Next, let's click on the Volumes tab in the left of the Central Dashboard."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:o(9268).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"You will see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"volumes",src:o(541).Z,width:"1386",height:"382"})),(0,r.kt)("p",null,"Volumes tab provides the functionality to manage the Persistent Volume Claims (PVC) belonging to the current user's namespace in Kubernetes' Volume (Volume)."),(0,r.kt)("p",null,"By looking at the screenshot, you can see the information of the Volume created on the ",(0,r.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," page. It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation."),(0,r.kt)("p",null,"In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace."),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"creating-a-volume"},"Creating a Volume"),(0,r.kt)("p",null,"By clicking the ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," button at the top right, you can see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-volume",src:o(7932).Z,width:"1192",height:"934"})),(0,r.kt)("p",null,"You can create a volume by specifying its name, size, storage class, and access mode."),(0,r.kt)("p",null,"When you specify the desired resource specs to create a volume, its Status will be shown as Pending on this page. When you hover over the Status icon, you will see a message that this ",(0,r.kt)("em",{parentName:"p"},"(This volume will be bound when its first consumer is created.)"),(0,r.kt)("br",{parentName:"p"}),"\n","This is according to the volume creation policy of the ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass")," used in the lab, which is local-path. 
",(0,r.kt)("strong",{parentName:"p"},"This is not a problem situation."),(0,r.kt)("br",{parentName:"p"}),"\n","When the Status is shown as Pending on this page, you can still specify the name of the volume in the notebook server or pod that you want to use the volume and the volume creation will be triggered at that time."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating-volume",src:o(9009).Z,width:"1572",height:"450"})))}p.isMDXComponent=!0},9009:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},9268:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},7932:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},541:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1229],{3905:(e,t,o)=>{o.d(t,{Zo:()=>c,kt:()=>h});var n=o(7294);function r(e,t,o){return t in e?Object.defineProperty(e,t,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[t]=o,e}function a(e,t){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),o.push.apply(o,n)}return o}function s(e){for(var t=1;t=0||(r[o]=e[o]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(r[o]=e[o])}return r}var l=n.createContext({}),u=function(e){var t=n.useContext(l),o=t;return e&&(o="function"==typeof e?e(t):s(s({},t),e)),o},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var o=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=u(o),m=r,h=d["".concat(l,".").concat(m)]||d[m]||p[m]||a;return o?n.createElement(h,s(s({ref:t},c),{},{components:o})):n.createElement(h,s({ref:t},c))}));function h(e,t){var o=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=o.length,s=new Array(a);s[0]=m;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var u=2;u{o.r(t),o.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var n=o(7462),r=(o(7294),o(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/volumes",id:"kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/volumes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. 
Tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/en/docs/kubeflow-dashboard-guide/experiments"}},l={},u=[{value:"Volumes",id:"volumes",level:2},{value:"Creating a Volume",id:"creating-a-volume",level:2}],c={toc:u},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"volumes"},"Volumes"),(0,r.kt)("p",null,"Next, let's click on the Volumes tab in the left of the Central Dashboard."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:o(9268).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"You will see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"volumes",src:o(541).Z,width:"1386",height:"382"})),(0,r.kt)("p",null,"Volumes tab provides the functionality to manage the Persistent Volume Claims (PVC) belonging to the current user's namespace in Kubernetes' Volume (Volume)."),(0,r.kt)("p",null,"By looking at the screenshot, you can see the information of the Volume created on the ",(0,r.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," page. It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation."),(0,r.kt)("p",null,"In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace."),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"creating-a-volume"},"Creating a Volume"),(0,r.kt)("p",null,"By clicking the ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," button at the top right, you can see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-volume",src:o(7932).Z,width:"1192",height:"934"})),(0,r.kt)("p",null,"You can create a volume by specifying its name, size, storage class, and access mode."),(0,r.kt)("p",null,"When you specify the desired resource specs to create a volume, its Status will be shown as Pending on this page. When you hover over the Status icon, you will see a message that this ",(0,r.kt)("em",{parentName:"p"},"(This volume will be bound when its first consumer is created.)"),(0,r.kt)("br",{parentName:"p"}),"\n","This is according to the volume creation policy of the ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass")," used in the lab, which is local-path. 
",(0,r.kt)("strong",{parentName:"p"},"This is not a problem situation."),(0,r.kt)("br",{parentName:"p"}),"\n","When the Status is shown as Pending on this page, you can still specify the name of the volume in the notebook server or pod that you want to use the volume and the volume creation will be triggered at that time."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating-volume",src:o(9009).Z,width:"1572",height:"450"})))}p.isMDXComponent=!0},9009:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},9268:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},7932:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},541:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file diff --git a/en/assets/js/52a462e1.9f608ae6.js b/en/assets/js/52a462e1.58ab6440.js similarity index 99% rename from en/assets/js/52a462e1.9f608ae6.js rename to en/assets/js/52a462e1.58ab6440.js index a759eecd..125c2533 100644 --- a/en/assets/js/52a462e1.9f608ae6.js +++ b/en/assets/js/52a462e1.58ab6440.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5497],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),c=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=c(n),k=o,m=d["".concat(l,".").concat(k)]||d[k]||p[k]||a;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[d]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/introduction",id:"version-1.0/prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to 
Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/en/docs/1.0/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/introduction.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/en/docs/1.0/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/en/docs/1.0/prerequisites/docker/"}},l={},c=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"Docker & Kubernetes",id:"docker--kubernetes",level:2},{value:"Not a software but a product",id:"not-a-software-but--a-product",level:3},{value:"Docker",id:"docker",level:4},{value:"Kubernetes",id:"kubernetes",level:4},{value:"History of Open source",id:"history-of-open-source",level:3},{value:"Initial Docker & Kubernetes",id:"initial-docker--kubernetes",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"Current docker & kubernetes",id:"current-docker--kubernetes",level:4},{value:"References",id:"references",level:3}],u={toc:c},d="wrapper";function p(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,o.kt)("p",null,"To operationalize machine learning models, additional functionalities beyond model development are required."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Training Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Schedule management for model training commands"),(0,o.kt)("li",{parentName:"ul"},"Ensuring reproducibility of trained models"))),(0,o.kt)("li",{parentName:"ol"},"Deployment Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Traffic distribution"),(0,o.kt)("li",{parentName:"ul"},"Monitoring service failures"),(0,o.kt)("li",{parentName:"ul"},"Troubleshooting in case of failures")))),(0,o.kt)("p",null,"Fortunately, the software development field has already put a lot of thought and effort into addressing these needs. Therefore, when deploying machine learning models, leveraging the outcomes of these considerations can be highly beneficial. 
Docker and Kubernetes are two prominent software products widely used in MLOps to address these needs."),(0,o.kt)("h2",{id:"docker--kubernetes"},"Docker & Kubernetes"),(0,o.kt)("h3",{id:"not-a-software-but--a-product"},"Not a software but a product"),(0,o.kt)("p",null,"Docker and Kubernetes are representative software (products) that provide containerization and container orchestration functions respectively."),(0,o.kt)("h4",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker was the mainstream in the past, but its usage has been decreasing gradually with the addition of various paid policy.",(0,o.kt)("br",{parentName:"p"}),"\n","However, as of March 2022, it is still the most commonly used container virtualization software."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2019.png",src:n(6067).Z,width:"1600",height:"900"})),(0,o.kt)("center",null," [from sysdig 2019] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2021.png",src:n(5579).Z,width:"750",height:"437"})),(0,o.kt)("center",null," [from sysdig 2021] "),(0,o.kt)("h4",{id:"kubernetes"},"Kubernetes"),(0,o.kt)("p",null,"Kubernetes: Kubernetes is a product that has almost no comparison so far."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cncf-survey.png",src:n(6721).Z,width:"2048",height:"1317"})),(0,o.kt)("center",null," [from cncf survey] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"t4-ai.png",src:n(6207).Z,width:"926",height:"629"})),(0,o.kt)("center",null," [from t4.ai] "),(0,o.kt)("h3",{id:"history-of-open-source"},"History of Open source"),(0,o.kt)("h4",{id:"initial-docker--kubernetes"},"Initial Docker & Kubernetes"),(0,o.kt)("p",null,"At the beginning of Docker development, ",(0,o.kt)("strong",{parentName:"p"},"one package")," called Docker Engine contained multiple features such as API, CLI, networking, storage, etc., but it began to be ",(0,o.kt)("strong",{parentName:"p"},"divided one by one")," according to the philosophy of ",(0,o.kt)("strong",{parentName:"p"},"MSA"),".",(0,o.kt)("br",{parentName:"p"}),"\n","However, the initial Kubernetes included Docker Engine for container virtualization.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, whenever the Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected."),(0,o.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,o.kt)("p",null,"In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.",(0,o.kt)("br",{parentName:"p"}),"\n","Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of Containerd."),(0,o.kt)("p",null,"In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5. 
"),(0,o.kt)("h4",{id:"cri-o"},"CRI-O"),(0,o.kt)("p",null,"CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes."),(0,o.kt)("h4",{id:"current-docker--kubernetes"},"Current docker & kubernetes"),(0,o.kt)("p",null,"Currently, Docker and Kubernetes have been using Docker Engine as the default container runtime, but since Docker's API did not match the CRI specification (",(0,o.kt)("em",{parentName:"p"},"OCI follows"),"), Kubernetes developed and supported a ",(0,o.kt)("strong",{parentName:"p"},"dockershim")," to make Docker's API compatible with CRI, (",(0,o.kt)("em",{parentName:"p"},"it was a huge burden for Kubernetes, not for Docker"),"). This was ",(0,o.kt)("strong",{parentName:"p"},"deprecated from Kubernetes v1.20 and abandoned from v1.23"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"v1.23 will be released in December 2021")),(0,o.kt)("p",null,"So from Kubernetes v1.23, you can no longer use Docker natively.\nHowever, ",(0,o.kt)("strong",{parentName:"p"},"users are not much affected by this change")," because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of what container runtime Kubernetes is made of."),(0,o.kt)("h3",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,o.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}p.isMDXComponent=!0},6721:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},6067:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},5579:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},6207:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5497],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),c=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=c(n),k=o,m=d["".concat(l,".").concat(k)]||d[k]||p[k]||a;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[d]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/introduction",id:"version-1.0/prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/en/docs/1.0/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/introduction.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/en/docs/1.0/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/en/docs/1.0/prerequisites/docker/"}},l={},c=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"Docker & Kubernetes",id:"docker--kubernetes",level:2},{value:"Not a software but a product",id:"not-a-software-but--a-product",level:3},{value:"Docker",id:"docker",level:4},{value:"Kubernetes",id:"kubernetes",level:4},{value:"History of Open source",id:"history-of-open-source",level:3},{value:"Initial Docker & Kubernetes",id:"initial-docker--kubernetes",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"Current docker & kubernetes",id:"current-docker--kubernetes",level:4},{value:"References",id:"references",level:3}],u={toc:c},d="wrapper";function p(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,o.kt)("p",null,"To operationalize machine learning models, additional functionalities beyond model development are required."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Training 
Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Schedule management for model training commands"),(0,o.kt)("li",{parentName:"ul"},"Ensuring reproducibility of trained models"))),(0,o.kt)("li",{parentName:"ol"},"Deployment Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Traffic distribution"),(0,o.kt)("li",{parentName:"ul"},"Monitoring service failures"),(0,o.kt)("li",{parentName:"ul"},"Troubleshooting in case of failures")))),(0,o.kt)("p",null,"Fortunately, the software development field has already put a lot of thought and effort into addressing these needs. Therefore, when deploying machine learning models, leveraging the outcomes of these considerations can be highly beneficial. Docker and Kubernetes are two prominent software products widely used in MLOps to address these needs."),(0,o.kt)("h2",{id:"docker--kubernetes"},"Docker & Kubernetes"),(0,o.kt)("h3",{id:"not-a-software-but--a-product"},"Not a software but a product"),(0,o.kt)("p",null,"Docker and Kubernetes are representative software (products) that provide containerization and container orchestration functions respectively."),(0,o.kt)("h4",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker was the mainstream in the past, but its usage has been decreasing gradually with the addition of various paid policy.",(0,o.kt)("br",{parentName:"p"}),"\n","However, as of March 2022, it is still the most commonly used container virtualization software."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2019.png",src:n(6067).Z,width:"1600",height:"900"})),(0,o.kt)("center",null," [from sysdig 2019] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2021.png",src:n(5579).Z,width:"750",height:"437"})),(0,o.kt)("center",null," [from sysdig 2021] "),(0,o.kt)("h4",{id:"kubernetes"},"Kubernetes"),(0,o.kt)("p",null,"Kubernetes: Kubernetes is a product that has almost no comparison so far."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cncf-survey.png",src:n(6721).Z,width:"2048",height:"1317"})),(0,o.kt)("center",null," [from cncf survey] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"t4-ai.png",src:n(6207).Z,width:"926",height:"629"})),(0,o.kt)("center",null," [from t4.ai] "),(0,o.kt)("h3",{id:"history-of-open-source"},"History of Open source"),(0,o.kt)("h4",{id:"initial-docker--kubernetes"},"Initial Docker & Kubernetes"),(0,o.kt)("p",null,"At the beginning of Docker development, ",(0,o.kt)("strong",{parentName:"p"},"one package")," called Docker Engine contained multiple features such as API, CLI, networking, storage, etc., but it began to be ",(0,o.kt)("strong",{parentName:"p"},"divided one by one")," according to the philosophy of ",(0,o.kt)("strong",{parentName:"p"},"MSA"),".",(0,o.kt)("br",{parentName:"p"}),"\n","However, the initial Kubernetes included Docker Engine for container virtualization.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, whenever the Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected."),(0,o.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,o.kt)("p",null,"In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.",(0,o.kt)("br",{parentName:"p"}),"\n","Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of 
Containerd."),(0,o.kt)("p",null,"In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5. "),(0,o.kt)("h4",{id:"cri-o"},"CRI-O"),(0,o.kt)("p",null,"CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes."),(0,o.kt)("h4",{id:"current-docker--kubernetes"},"Current docker & kubernetes"),(0,o.kt)("p",null,"Currently, Docker and Kubernetes have been using Docker Engine as the default container runtime, but since Docker's API did not match the CRI specification (",(0,o.kt)("em",{parentName:"p"},"OCI follows"),"), Kubernetes developed and supported a ",(0,o.kt)("strong",{parentName:"p"},"dockershim")," to make Docker's API compatible with CRI, (",(0,o.kt)("em",{parentName:"p"},"it was a huge burden for Kubernetes, not for Docker"),"). This was ",(0,o.kt)("strong",{parentName:"p"},"deprecated from Kubernetes v1.20 and abandoned from v1.23"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"v1.23 will be released in December 2021")),(0,o.kt)("p",null,"So from Kubernetes v1.23, you can no longer use Docker natively.\nHowever, ",(0,o.kt)("strong",{parentName:"p"},"users are not much affected by this change")," because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of what container runtime Kubernetes is made of."),(0,o.kt)("h3",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,o.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}p.isMDXComponent=!0},6721:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},6067:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},5579:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},6207:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file diff --git a/en/assets/js/52b91c1d.00a55d30.js b/en/assets/js/52b91c1d.852fb386.js similarity index 99% rename from en/assets/js/52b91c1d.00a55d30.js rename 
to en/assets/js/52b91c1d.852fb386.js index fbc357b2..803cdb62 100644 --- a/en/assets/js/52b91c1d.00a55d30.js +++ b/en/assets/js/52b91c1d.852fb386.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6863],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,s=e.originalType,i=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),c=d(t),m=l,g=c["".concat(i,".").concat(m)]||c[m]||u[m]||s;return t?a.createElement(g,r(r({ref:n},p),{},{components:t})):a.createElement(g,r({ref:n},p))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var s=t.length,r=new Array(s);r[0]=m;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[c]="string"==typeof e?e:l,r[1]=o;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>o,toc:()=>d});var a=t(7462),l=(t(7294),t(3905));const s={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},r=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/en/docs/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-iris.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/en/docs/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/en/docs/api-deployment/seldon-pg"}},i={},d=[{value:"Deploy with SeldonDeployment",id:"deploy-with-seldondeployment",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:4},{value:"2. 
Define Spec",id:"2-define-spec",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Accessing Swagger",id:"1-accessing-swagger",level:3},{value:"2. Selecting Swagger Predictions",id:"2-selecting-swagger-predictions",level:3},{value:"3. Choosing Try it out",id:"3-choosing-try-it-out",level:3},{value:"4. Inputting data in the Request body",id:"4-inputting-data-in-the-request-body",level:3},{value:"5. Check the inference results",id:"5-check-the-inference-results",level:3},{value:"Using CLI",id:"using-cli",level:2}],p={toc:d},c="wrapper";function u(e){let{components:n,...s}=e;return(0,l.kt)(c,(0,a.Z)({},p,s,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"deploy-with-seldondeployment"},"Deploy with SeldonDeployment"),(0,l.kt)("p",null,"Let's deploy our trained model as an API using SeldonDeployment. SeldonDeployment is a custom resource definition (CRD) defined to deploy models as REST/gRPC servers on Kubernetes."),(0,l.kt)("h4",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"We will conduct the SeldonDeployment related practice in a new namespace called seldon-deploy. After creating the namespace, set seldon-deploy as the current namespace."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-define-spec"},"2. Define Spec"),(0,l.kt)("p",null,"Generate a yaml file to deploy SeldonDeployment.\nIn this page, we will use a publicly available iris model.\nBecause this iris model is trained through the sklearn framework, we use SKLEARN_SERVER."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"Deploy yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"Check if the deployment was successful through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"If everyone runs, similar results will be printed."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"Now, send a inference request to the deployed model to get the inference result. 
The API created by the SeldonDeployment follows the following rule:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-seldon"},"Since Seldon Core was installed with Ambassador as the Ingress Controller"),", all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway."),(0,l.kt)("p",null,"Therefore, first set the url of the Ambassador Ingress Gateway as an environment variable."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"Check the set url."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"It should be outputted similarly as follows, and if set through the cloud, you can check that internal IP address is set."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"This refers to the ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," where the SeldonDeployment is deployed and used to define the values defined in the metadata when defining the spec."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"In the example above, ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," is seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," is sklearn."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"In SeldonDeployment, the commonly used ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name")," has two options:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"The detailed usage of each method is explained below."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon."),(0,l.kt)("h3",{id:"1-accessing-swagger"},"1. Accessing Swagger"),(0,l.kt)("p",null,"According to the provided ingress URL rules, you can access the Swagger documentation using the following URL:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(1885).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-selecting-swagger-predictions"},"2. Selecting Swagger Predictions"),(0,l.kt)("p",null,"In the Swagger UI, select the ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," endpoint."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(2260).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-choosing-try-it-out"},"3. 
Choosing ",(0,l.kt)("em",{parentName:"h3"},"Try it out")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(982).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-inputting-data-in-the-request-body"},"4. Inputting data in the Request body"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(6070).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"Enter the following data into the Request body."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-check-the-inference-results"},"5. Check the inference results"),(0,l.kt)("p",null,"You can click the ",(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," button to obtain the inference result."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(8481).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"If everything is executed successfully, you will obtain the following inference result."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"Also, you can use http client CLI tools such as curl to make API requests.\nFor example, requesting ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions")," as follows"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"You can confirm that the following response is outputted normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}u.isMDXComponent=!0},1885:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},2260:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},982:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},6070:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},8481:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6863],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),d=function(e){var 
n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,s=e.originalType,i=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),c=d(t),m=l,g=c["".concat(i,".").concat(m)]||c[m]||u[m]||s;return t?a.createElement(g,r(r({ref:n},p),{},{components:t})):a.createElement(g,r({ref:n},p))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var s=t.length,r=new Array(s);r[0]=m;var o={};for(var i in n)hasOwnProperty.call(n,i)&&(o[i]=n[i]);o.originalType=e,o[c]="string"==typeof e?e:l,r[1]=o;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>u,frontMatter:()=>s,metadata:()=>o,toc:()=>d});var a=t(7462),l=(t(7294),t(3905));const s={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},r=void 0,o={unversionedId:"api-deployment/seldon-iris",id:"api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/en/docs/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-iris.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/en/docs/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/en/docs/api-deployment/seldon-pg"}},i={},d=[{value:"Deploy with SeldonDeployment",id:"deploy-with-seldondeployment",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:4},{value:"2. Define Spec",id:"2-define-spec",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Accessing Swagger",id:"1-accessing-swagger",level:3},{value:"2. Selecting Swagger Predictions",id:"2-selecting-swagger-predictions",level:3},{value:"3. Choosing Try it out",id:"3-choosing-try-it-out",level:3},{value:"4. Inputting data in the Request body",id:"4-inputting-data-in-the-request-body",level:3},{value:"5. Check the inference results",id:"5-check-the-inference-results",level:3},{value:"Using CLI",id:"using-cli",level:2}],p={toc:d},c="wrapper";function u(e){let{components:n,...s}=e;return(0,l.kt)(c,(0,a.Z)({},p,s,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"deploy-with-seldondeployment"},"Deploy with SeldonDeployment"),(0,l.kt)("p",null,"Let's deploy our trained model as an API using SeldonDeployment. 
SeldonDeployment is a custom resource definition (CRD) defined to deploy models as REST/gRPC servers on Kubernetes."),(0,l.kt)("h4",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"We will conduct the SeldonDeployment related practice in a new namespace called seldon-deploy. After creating the namespace, set seldon-deploy as the current namespace."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-define-spec"},"2. Define Spec"),(0,l.kt)("p",null,"Generate a yaml file to deploy SeldonDeployment.\nIn this page, we will use a publicly available iris model.\nBecause this iris model is trained through the sklearn framework, we use SKLEARN_SERVER."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"Deploy yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"Check if the deployment was successful through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"If everyone runs, similar results will be printed."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"Now, send a inference request to the deployed model to get the inference result. 
The API created by the SeldonDeployment follows the following rule:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-seldon"},"Since Seldon Core was installed with Ambassador as the Ingress Controller"),", all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway."),(0,l.kt)("p",null,"Therefore, first set the url of the Ambassador Ingress Gateway as an environment variable."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"Check the set url."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"It should be outputted similarly as follows, and if set through the cloud, you can check that internal IP address is set."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"This refers to the ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," where the SeldonDeployment is deployed and used to define the values defined in the metadata when defining the spec."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"In the example above, ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," is seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," is sklearn."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"In SeldonDeployment, the commonly used ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name")," has two options:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"The detailed usage of each method is explained below."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon."),(0,l.kt)("h3",{id:"1-accessing-swagger"},"1. Accessing Swagger"),(0,l.kt)("p",null,"According to the provided ingress URL rules, you can access the Swagger documentation using the following URL:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(1885).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-selecting-swagger-predictions"},"2. Selecting Swagger Predictions"),(0,l.kt)("p",null,"In the Swagger UI, select the ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," endpoint."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(2260).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-choosing-try-it-out"},"3. 
Choosing ",(0,l.kt)("em",{parentName:"h3"},"Try it out")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(982).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-inputting-data-in-the-request-body"},"4. Inputting data in the Request body"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(6070).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"Enter the following data into the Request body."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-check-the-inference-results"},"5. Check the inference results"),(0,l.kt)("p",null,"You can click the ",(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," button to obtain the inference result."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(8481).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"If everything is executed successfully, you will obtain the following inference result."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"Also, you can use http client CLI tools such as curl to make API requests.\nFor example, requesting ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions")," as follows"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"You can confirm that the following response is outputted normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}u.isMDXComponent=!0},1885:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},2260:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},982:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},6070:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},8481:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file diff --git a/en/assets/js/55e75476.228cf0b1.js b/en/assets/js/55e75476.a4725dfe.js similarity index 99% rename from en/assets/js/55e75476.228cf0b1.js rename to en/assets/js/55e75476.a4725dfe.js index 1f7a7ced..56d1042c 100644 --- a/en/assets/js/55e75476.228cf0b1.js +++ b/en/assets/js/55e75476.a4725dfe.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4141],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=i.createContext({}),d=function(e){var t=i.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return i.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=d(n),m=a,f=u["".concat(s,".").concat(m)]||u[m]||c[m]||o;return n?i.createElement(f,r(r({ref:t},p),{},{components:n})):i.createElement(f,r({ref:t},p))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,r=new Array(o);r[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:a,r[1]=l;for(var d=2;d{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>d});var i=n(7462),a=(n(7294),n(3905));const o={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},r=void 0,l={unversionedId:"introduction/component",id:"introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/en/docs/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/en/docs/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/en/docs/introduction/why_kubernetes"}},s={},d=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model Training",id:"3-model-training",level:3},{value:"4. Model Evaluation",id:"4-model-evaluation",level:3},{value:"5. Model Serving",id:"5-model-serving",level:3},{value:"6. Online Experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. 
ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],p={toc:d},u="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(u,(0,i.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,a.kt)("p",null,"Google's white paper ","[Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning]"," published in May 2021 mentions the following core functionalities of MLOps: "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlops-component",src:n(6540).Z,width:"2352",height:"1890"})),(0,a.kt)("p",null,"Let's look at what each feature does."),(0,a.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,a.kt)("p",null,"Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Integration with version control tools like Git and a notebook (Jupyter Notebook) environment"),(0,a.kt)("li",{parentName:"ul"},"Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics"),(0,a.kt)("li",{parentName:"ul"},"Data and model analysis and visualization capabilities")),(0,a.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,a.kt)("p",null,"Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Data connectors compatible with various data sources and services"),(0,a.kt)("li",{parentName:"ul"},"Data encoders and decoders compatible with different data formats"),(0,a.kt)("li",{parentName:"ul"},"Data transformation and feature engineering capabilities for different data types"),(0,a.kt)("li",{parentName:"ul"},"Scalable batch and streaming data processing capabilities for training and serving")),(0,a.kt)("h3",{id:"3-model-training"},"3. Model Training"),(0,a.kt)("p",null,"Model Training offers functionalities to efficiently execute algorithms for model training:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Environment provisioning for ML framework execution"),(0,a.kt)("li",{parentName:"ul"},"Distributed training environment for multiple GPUs and distributed training"),(0,a.kt)("li",{parentName:"ul"},"Hyperparameter tuning and optimization capabilities")),(0,a.kt)("h3",{id:"4-model-evaluation"},"4. Model Evaluation"),(0,a.kt)("p",null,"Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model performance evaluation on evaluation datasets"),(0,a.kt)("li",{parentName:"ul"},"Tracking prediction performance across different continuous training runs"),(0,a.kt)("li",{parentName:"ul"},"Comparison and visualization of performance between different models"),(0,a.kt)("li",{parentName:"ul"},"Model output interpretation using interpretable AI techniques")),(0,a.kt)("h3",{id:"5-model-serving"},"5. 
Model Serving"),(0,a.kt)("p",null,"Model serving offers functionalities to deploy and serve models in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Low-latency and high-availability inference capabilities"),(0,a.kt)("li",{parentName:"ul"},"Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)"),(0,a.kt)("li",{parentName:"ul"},"Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results"),(0,a.kt)("li",{parentName:"ul"},"Autoscaling capabilities to handle spiking inference requests"),(0,a.kt)("li",{parentName:"ul"},"Logging of inference requests and results")),(0,a.kt)("h3",{id:"6-online-experimentation"},"6. Online Experimentation"),(0,a.kt)("p",null,"Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Canary and shadow deployment features"),(0,a.kt)("li",{parentName:"ul"},"A/B testing capabilities"),(0,a.kt)("li",{parentName:"ul"},"Multi-armed bandit testing functionality")),(0,a.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,a.kt)("p",null,"Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates."),(0,a.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,a.kt)("p",null,"ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Pipeline execution through various event sources"),(0,a.kt)("li",{parentName:"ul"},"ML metadata tracking and integration for pipeline parameter and artifact management"),(0,a.kt)("li",{parentName:"ul"},"Support for built-in components for common ML tasks and user-defined components"),(0,a.kt)("li",{parentName:"ul"},"Provisioning of different execution environments")),(0,a.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,a.kt)("p",null,"The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Registration, tracking, and versioning of trained and deployed models"),(0,a.kt)("li",{parentName:"ul"},"Storage of information about the required data and runtime packages for deployment")),(0,a.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Sharing, search, reuse, and versioning capabilities for datasets"),(0,a.kt)("li",{parentName:"ul"},"Real-time processing and low-latency serving capabilities for event streaming and online inference tasks"),(0,a.kt)("li",{parentName:"ul"},"Support for various types of data, such as images, text, and tabular data")),(0,a.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,a.kt)("p",null,"In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. 
ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"History management for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Tracking and sharing of experiments and pipeline parameter configurations"),(0,a.kt)("li",{parentName:"ul"},"Storage, access, visualization, and download capabilities for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Integration with other MLOps functionalities")))}c.isMDXComponent=!0},6540:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4141],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=i.createContext({}),d=function(e){var t=i.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return i.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=d(n),m=a,f=u["".concat(s,".").concat(m)]||u[m]||c[m]||o;return n?i.createElement(f,r(r({ref:t},p),{},{components:n})):i.createElement(f,r({ref:t},p))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,r=new Array(o);r[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:a,r[1]=l;for(var d=2;d{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>d});var i=n(7462),a=(n(7294),n(3905));const o={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},r=void 0,l={unversionedId:"introduction/component",id:"introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/en/docs/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/component.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/en/docs/introduction/levels"},next:{title:"4. 
Why Kubernetes?",permalink:"/en/docs/introduction/why_kubernetes"}},s={},d=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model Training",id:"3-model-training",level:3},{value:"4. Model Evaluation",id:"4-model-evaluation",level:3},{value:"5. Model Serving",id:"5-model-serving",level:3},{value:"6. Online Experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],p={toc:d},u="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(u,(0,i.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,a.kt)("p",null,"Google's white paper ","[Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning]"," published in May 2021 mentions the following core functionalities of MLOps: "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlops-component",src:n(6540).Z,width:"2352",height:"1890"})),(0,a.kt)("p",null,"Let's look at what each feature does."),(0,a.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,a.kt)("p",null,"Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Integration with version control tools like Git and a notebook (Jupyter Notebook) environment"),(0,a.kt)("li",{parentName:"ul"},"Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics"),(0,a.kt)("li",{parentName:"ul"},"Data and model analysis and visualization capabilities")),(0,a.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,a.kt)("p",null,"Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Data connectors compatible with various data sources and services"),(0,a.kt)("li",{parentName:"ul"},"Data encoders and decoders compatible with different data formats"),(0,a.kt)("li",{parentName:"ul"},"Data transformation and feature engineering capabilities for different data types"),(0,a.kt)("li",{parentName:"ul"},"Scalable batch and streaming data processing capabilities for training and serving")),(0,a.kt)("h3",{id:"3-model-training"},"3. Model Training"),(0,a.kt)("p",null,"Model Training offers functionalities to efficiently execute algorithms for model training:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Environment provisioning for ML framework execution"),(0,a.kt)("li",{parentName:"ul"},"Distributed training environment for multiple GPUs and distributed training"),(0,a.kt)("li",{parentName:"ul"},"Hyperparameter tuning and optimization capabilities")),(0,a.kt)("h3",{id:"4-model-evaluation"},"4. 
Model Evaluation"),(0,a.kt)("p",null,"Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model performance evaluation on evaluation datasets"),(0,a.kt)("li",{parentName:"ul"},"Tracking prediction performance across different continuous training runs"),(0,a.kt)("li",{parentName:"ul"},"Comparison and visualization of performance between different models"),(0,a.kt)("li",{parentName:"ul"},"Model output interpretation using interpretable AI techniques")),(0,a.kt)("h3",{id:"5-model-serving"},"5. Model Serving"),(0,a.kt)("p",null,"Model serving offers functionalities to deploy and serve models in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Low-latency and high-availability inference capabilities"),(0,a.kt)("li",{parentName:"ul"},"Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)"),(0,a.kt)("li",{parentName:"ul"},"Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results"),(0,a.kt)("li",{parentName:"ul"},"Autoscaling capabilities to handle spiking inference requests"),(0,a.kt)("li",{parentName:"ul"},"Logging of inference requests and results")),(0,a.kt)("h3",{id:"6-online-experimentation"},"6. Online Experimentation"),(0,a.kt)("p",null,"Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Canary and shadow deployment features"),(0,a.kt)("li",{parentName:"ul"},"A/B testing capabilities"),(0,a.kt)("li",{parentName:"ul"},"Multi-armed bandit testing functionality")),(0,a.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,a.kt)("p",null,"Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates."),(0,a.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,a.kt)("p",null,"ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Pipeline execution through various event sources"),(0,a.kt)("li",{parentName:"ul"},"ML metadata tracking and integration for pipeline parameter and artifact management"),(0,a.kt)("li",{parentName:"ul"},"Support for built-in components for common ML tasks and user-defined components"),(0,a.kt)("li",{parentName:"ul"},"Provisioning of different execution environments")),(0,a.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,a.kt)("p",null,"The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Registration, tracking, and versioning of trained and deployed models"),(0,a.kt)("li",{parentName:"ul"},"Storage of information about the required data and runtime packages for deployment")),(0,a.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. 
Dataset and Feature Repository"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Sharing, search, reuse, and versioning capabilities for datasets"),(0,a.kt)("li",{parentName:"ul"},"Real-time processing and low-latency serving capabilities for event streaming and online inference tasks"),(0,a.kt)("li",{parentName:"ul"},"Support for various types of data, such as images, text, and tabular data")),(0,a.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,a.kt)("p",null,"In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"History management for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Tracking and sharing of experiments and pipeline parameter configurations"),(0,a.kt)("li",{parentName:"ul"},"Storage, access, visualization, and download capabilities for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Integration with other MLOps functionalities")))}c.isMDXComponent=!0},6540:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file diff --git a/en/assets/js/56b79ddf.ee67b1c4.js b/en/assets/js/56b79ddf.d4f1c267.js similarity index 98% rename from en/assets/js/56b79ddf.ee67b1c4.js rename to en/assets/js/56b79ddf.d4f1c267.js index fbb748fa..40aa6a09 100644 --- a/en/assets/js/56b79ddf.ee67b1c4.js +++ b/en/assets/js/56b79ddf.d4f1c267.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2265],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),p=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=p(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=p(n),m=a,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||o;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,l[1]=s;for(var p=2;p{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var r=n(7462),a=(n(7294),n(3905));const o={title:"3. 
Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,s={unversionedId:"api-deployment/seldon-pg",id:"api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/en/docs/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/api-deployment/seldon-iris"},next:{title:"4. Seldon Fields",permalink:"/en/docs/api-deployment/seldon-fields"}},i={},p=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"Dashboard",id:"dashboard",level:3},{value:"Request API",id:"request-api",level:3}],d={toc:p},c="wrapper";function u(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,r.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,a.kt)("p",null,"Now, let's perform repeated API requests with the SeldonDeployment we created on the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-iris"},"previous page")," and check if the dashboard changes."),(0,a.kt)("h3",{id:"dashboard"},"Dashboard"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-pg"},"Forward the dashboard created earlier"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,a.kt)("h3",{id:"request-api"},"Request API"),(0,a.kt)("p",null,"Request ",(0,a.kt)("strong",{parentName:"p"},"repeated")," to the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-iris#using-cli"},"previously created Seldon Deployment"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,a.kt)("p",null,"Furthermore, when checking the Grafana dashboard, you can observe that the Global Request Rate increases momentarily from ",(0,a.kt)("inlineCode",{parentName:"p"},"0 ops"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"repeat-raise.png",src:n(9506).Z,width:"5016",height:"2826"})),(0,a.kt)("p",null,"This confirms that Prometheus and Grafana have been successfully installed and configured."))}u.isMDXComponent=!0},9506:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2265],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>f});var r=n(7294);function a(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var i=r.createContext({}),p=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},d=function(e){var t=p(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},m=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,i=e.parentName,d=s(e,["components","mdxType","originalType","parentName"]),c=p(n),m=a,f=c["".concat(i,".").concat(m)]||c[m]||u[m]||o;return n?r.createElement(f,l(l({ref:t},d),{},{components:n})):r.createElement(f,l({ref:t},d))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[c]="string"==typeof e?e:a,l[1]=s;for(var p=2;p{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>u,frontMatter:()=>o,metadata:()=>s,toc:()=>p});var r=n(7462),a=(n(7294),n(3905));const o={title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:new Date("2021-12-24T00:00:00.000Z"),lastmod:new Date("2021-12-24T00:00:00.000Z"),contributors:["Jongseob Jeon"]},l=void 0,s={unversionedId:"api-deployment/seldon-pg",id:"api-deployment/seldon-pg",title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-pg.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-pg",permalink:"/en/docs/api-deployment/seldon-pg",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-pg.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon Monitoring",description:"Prometheus & Grafana \ud655\uc778\ud558\uae30",sidebar_position:3,date:"2021-12-24T00:00:00.000Z",lastmod:"2021-12-24T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/api-deployment/seldon-iris"},next:{title:"4. 
Seldon Fields",permalink:"/en/docs/api-deployment/seldon-fields"}},i={},p=[{value:"Grafana & Prometheus",id:"grafana--prometheus",level:2},{value:"Dashboard",id:"dashboard",level:3},{value:"Request API",id:"request-api",level:3}],d={toc:p},c="wrapper";function u(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,r.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"grafana--prometheus"},"Grafana & Prometheus"),(0,a.kt)("p",null,"Now, let's perform repeated API requests with the SeldonDeployment we created on the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-iris"},"previous page")," and check if the dashboard changes."),(0,a.kt)("h3",{id:"dashboard"},"Dashboard"),(0,a.kt)("p",null,(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-pg"},"Forward the dashboard created earlier"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80\n")),(0,a.kt)("h3",{id:"request-api"},"Request API"),(0,a.kt)("p",null,"Request ",(0,a.kt)("strong",{parentName:"p"},"repeated")," to the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-iris#using-cli"},"previously created Seldon Deployment"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,a.kt)("p",null,"Furthermore, when checking the Grafana dashboard, you can observe that the Global Request Rate increases momentarily from ",(0,a.kt)("inlineCode",{parentName:"p"},"0 ops"),"."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"repeat-raise.png",src:n(9506).Z,width:"5016",height:"2826"})),(0,a.kt)("p",null,"This confirms that Prometheus and Grafana have been successfully installed and configured."))}u.isMDXComponent=!0},9506:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/repeat-raise-60a3d043d2ac70549160aa936b4bed46.png"}}]); \ No newline at end of file diff --git a/en/assets/js/593df1f8.d96bde14.js b/en/assets/js/593df1f8.351d9a17.js similarity index 99% rename from en/assets/js/593df1f8.d96bde14.js rename to en/assets/js/593df1f8.351d9a17.js index 88461f33..b440572d 100644 --- a/en/assets/js/593df1f8.d96bde14.js +++ b/en/assets/js/593df1f8.351d9a17.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4994],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var 
a=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),m=p(a),d=l,k=m["".concat(s,".").concat(d)]||m[d]||c[d]||r;return a?n.createElement(k,o(o({ref:t},u),{},{components:a})):n.createElement(k,o({ref:t},u))}));function k(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,o=new Array(r);o[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:l,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. 
(Optional) Setup GPU",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],u={toc:p},m="wrapper";function c(e){let{components:t,...a}=e;return(0,l.kt)(m,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,l.kt)("p",null,"On this page, we will explain how to install the modules that will be used on the cluster from the client nodes.",(0,l.kt)("br",{parentName:"p"}),"\n","All the processes introduced here will be done on the ",(0,l.kt)("strong",{parentName:"p"},"client nodes"),"."),(0,l.kt)("h2",{id:"helm"},"Helm"),(0,l.kt)("p",null,"Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download Helm version 3.7.1 into the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS refer to the ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"official website")," for the download path of the binary that matches the OS and CPU of your client node."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip the file to use helm and move the file to its desired location."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check to see if the installation was successful:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,l.kt)("p",{parentName:"li"},"Environment variables:"),(0,l.kt)("table",{parentName:"li"},(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,l.kt)("p",{parentName:"li"},"..."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre"},"")))),(0,l.kt)("h2",{id:"kustomize"},"Kustomize"),(0,l.kt)("p",null,"Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download the binary version of kustomize v3.10.0 in the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS can be downloaded from ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0")," after checking."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip to use kustomize, and change the file location. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check if it is installed correctly."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,l.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"The CSI Plugin is a module that is responsible for storage within Kubernetes. Install the CSI Plugin, Local Path Provisioner, which is easy to use in single node clusters."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following messages, it means that the installation was successful: "),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")))),(0,l.kt)("p",null,"If successful, it will display the following output:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n")),(0,l.kt)("ol",{start:4},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Execute the following command to change the default storage class:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,l.kt)("p",{parentName:"li"},"If the command is successful, the following output will be displayed:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Verify 
that the default storage class has been set:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,l.kt)("p",{parentName:"li"},"Check if there is a storage class with the name ",(0,l.kt)("inlineCode",{parentName:"p"},"local-path (default)")," in the NAME column:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4994],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),m=p(a),d=l,k=m["".concat(s,".").concat(d)]||m[d]||c[d]||r;return a?n.createElement(k,o(o({ref:t},u),{},{components:a})):n.createElement(k,o({ref:t},u))}));function k(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,o=new Array(r);o[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:l,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. 
Minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],u={toc:p},m="wrapper";function c(e){let{components:t,...a}=e;return(0,l.kt)(m,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,l.kt)("p",null,"On this page, we will explain how to install the modules that will be used on the cluster from the client nodes.",(0,l.kt)("br",{parentName:"p"}),"\n","All the processes introduced here will be done on the ",(0,l.kt)("strong",{parentName:"p"},"client nodes"),"."),(0,l.kt)("h2",{id:"helm"},"Helm"),(0,l.kt)("p",null,"Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download Helm version 3.7.1 into the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS refer to the ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"official website")," for the download path of the binary that matches the OS and CPU of your client node."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip the file to use helm and move the file to its desired location."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check to see if the installation was successful:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,l.kt)("p",{parentName:"li"},"Environment variables:"),(0,l.kt)("table",{parentName:"li"},(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,l.kt)("p",{parentName:"li"},"..."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre"},"")))),(0,l.kt)("h2",{id:"kustomize"},"Kustomize"),(0,l.kt)("p",null,"Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download the binary version of kustomize v3.10.0 in the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS can be downloaded from ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0")," after checking."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip to use kustomize, and change the file location. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check if it is installed correctly."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,l.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"The CSI Plugin is a module that is responsible for storage within Kubernetes. Install the CSI Plugin, Local Path Provisioner, which is easy to use in single node clusters."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following messages, it means that the installation was successful: "),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")))),(0,l.kt)("p",null,"If successful, it will display the following output:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n")),(0,l.kt)("ol",{start:4},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Execute the following command to change the default storage class:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,l.kt)("p",{parentName:"li"},"If the command is successful, the following output will be displayed:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Verify 
that the default storage class has been set:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,l.kt)("p",{parentName:"li"},"Check if there is a storage class with the name ",(0,l.kt)("inlineCode",{parentName:"p"},"local-path (default)")," in the NAME column:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/5ccc0acb.de35c7a6.js b/en/assets/js/5ccc0acb.6f7eb5a1.js similarity index 98% rename from en/assets/js/5ccc0acb.de35c7a6.js rename to en/assets/js/5ccc0acb.6f7eb5a1.js index 18c0d3fd..b39d93fd 100644 --- a/en/assets/js/5ccc0acb.de35c7a6.js +++ b/en/assets/js/5ccc0acb.6f7eb5a1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5101],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),p=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(n),d=o,f=u["".concat(s,".").concat(d)]||u[d]||m[d]||a;return n?r.createElement(f,i(i({ref:t},c),{},{components:n})):r.createElement(f,i({ref:t},c))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"version-1.0/kubeflow/basic-requirements",title:"3. Install Requirements",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-requirements.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. 
Kubeflow Concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/en/docs/1.0/kubeflow/basic-component"}},s={},p=[],c={toc:p},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. Python Virtual Environment")," and install the packages on the ",(0,o.kt)("strong",{parentName:"p"},"client node"),"."),(0,o.kt)("p",null,"The packages and versions required for the practice are as follows:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,"Activate the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"Python virtual environment")," created in the previous section."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"We are proceeding with the package installation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5101],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),p=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},c=function(e){var t=p(e.components);return r.createElement(s.Provider,{value:t},e.children)},u="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),u=p(n),d=o,f=u["".concat(s,".").concat(d)]||u[d]||m[d]||a;return n?r.createElement(f,i(i({ref:t},c),{},{components:n})):r.createElement(f,i({ref:t},c))}));function f(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>m,frontMatter:()=>a,metadata:()=>l,toc:()=>p});var r=n(7462),o=(n(7294),n(3905));const a={title:"3. 
Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},i=void 0,l={unversionedId:"kubeflow/basic-requirements",id:"version-1.0/kubeflow/basic-requirements",title:"3. Install Requirements",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-requirements.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-requirements.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Requirements",description:"",sidebar_position:3,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"2. Kubeflow Concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts"},next:{title:"4. Component - Write",permalink:"/en/docs/1.0/kubeflow/basic-component"}},s={},p=[],c={toc:p},u="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(u,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to ",(0,o.kt)("a",{parentName:"p",href:"../appendix/pyenv"},"Appendix 1. Python Virtual Environment")," and install the packages on the ",(0,o.kt)("strong",{parentName:"p"},"client node"),"."),(0,o.kt)("p",null,"The packages and versions required for the practice are as follows:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("p",{parentName:"li"},"requirements.txt"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kfp==1.8.9\nscikit-learn==1.0.1\nmlflow==1.21.0\npandas==1.3.4\ndill==0.3.4\n")))),(0,o.kt)("p",null,"Activate the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/appendix/pyenv#python-%EA%B0%80%EC%83%81%ED%99%98%EA%B2%BD-%EC%83%9D%EC%84%B1"},"Python virtual environment")," created in the previous section."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,o.kt)("p",null,"We are proceeding with the package installation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"pip3 install -U pip\npip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/5d928751.1f66856a.js b/en/assets/js/5d928751.e5b86cce.js similarity index 99% rename from en/assets/js/5d928751.1f66856a.js rename to en/assets/js/5d928751.e5b86cce.js index 30c287b5..59f4134a 100644 --- a/en/assets/js/5d928751.1f66856a.js +++ b/en/assets/js/5d928751.e5b86cce.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1032],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>_});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var 
l=a.createContext({}),u=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},s=function(e){var n=u(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,p=e.originalType,l=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),m=u(t),c=i,_=m["".concat(l,".").concat(c)]||m[c]||d[c]||p;return t?a.createElement(_,r(r({ref:n},s),{},{components:t})):a.createElement(_,r({ref:n},s))}));function _(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var p=t.length,r=new Array(p);r[0]=c;var o={};for(var l in n)hasOwnProperty.call(n,l)&&(o[l]=n[l]);o.originalType=e,o[m]="string"==typeof e?e:i,r[1]=o;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>r,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>u});var a=t(7462),i=(t(7294),t(3905));const p={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},r=void 0,o={unversionedId:"kubeflow/advanced-run",id:"version-1.0/kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/en/docs/1.0/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow"}},l={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],s={toc:u},m="wrapper";function d(e){let{components:n,...p}=e;return(0,i.kt)(m,(0,a.Z)({},s,p,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"Click Run Result and you will see three tabs:\nGraph, Run Output, and Config."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-0.png",src:t(1842).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"graph"},"Graph"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-1.png",src:t(275).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the graph, if you click on the run component, you can check the running information of the component."),(0,i.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,i.kt)("p",null,"The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components."),(0,i.kt)("h3",{id:"logs"},"Logs"),(0,i.kt)("p",null,"In the Logs tab, you can view all the stdout output generated during the execution of the Python code.\nHowever, pods are deleted after a certain period of time, so you may not be able to view them in this tab after a certain time.\nIn that case, you can check them in the main-logs section of the Output artifacts."),(0,i.kt)("h3",{id:"visualizations"},"Visualizations"),(0,i.kt)("p",null,"The Visualizations tab displays plots generated by the components."),(0,i.kt)("p",null,"To generate a plot, you can save the desired values as an argument using ",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")'),". 
The plot should be in HTML format.\nThe conversion process is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,i.kt)("p",null,"If written in pipeline, it will be like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,i.kt)("p",null,"If you run this script and check the resulting ",(0,i.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),", you will see the following."),(0,i.kt)("p",null,(0,i.kt)("details",null,(0,i.kt)("summary",null,"plot_pipeline.yaml"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = 
base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,i.kt)("p",null,"After running, click Visualization."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-5.png",src:t(9423).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-output"},"Run output"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-2.png",src:t(803).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Run output is where Kubeflow gathers the Artifacts generated in the specified form and shows the evaluation index (Metric)."),(0,i.kt)("p",null,"To show the evaluation index (Metric), you can save the name and value you want to show in the 
",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument in json format. For example, you can write it like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,i.kt)("p",null,"We will add a component to generate evaluation metrics to the pipeline created in the ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-pipeline"},"Pipeline")," and execute it. The whole pipeline is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,i.kt)("p",null,"After execution, click Run Output and it will show like this."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-4.png",src:t(5665).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"config"},"Config"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-3.png",src:t(896).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the Config tab, you can view all the values received as pipeline configurations."))}d.isMDXComponent=!0},1842:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},275:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},803:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},896:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},5665:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},9423:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1032],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>_});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var l=a.createContext({}),u=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},s=function(e){var n=u(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,p=e.originalType,l=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),m=u(t),c=i,_=m["".concat(l,".").concat(c)]||m[c]||d[c]||p;return t?a.createElement(_,r(r({ref:n},s),{},{components:t})):a.createElement(_,r({ref:n},s))}));function _(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var p=t.length,r=new Array(p);r[0]=c;var o={};for(var l in n)hasOwnProperty.call(n,l)&&(o[l]=n[l]);o.originalType=e,o[m]="string"==typeof e?e:i,r[1]=o;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>r,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>u});var a=t(7462),i=(t(7294),t(3905));const p={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},r=void 0,o={unversionedId:"kubeflow/advanced-run",id:"version-1.0/kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/en/docs/1.0/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow"}},l={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],s={toc:u},m="wrapper";function d(e){let{components:n,...p}=e;return(0,i.kt)(m,(0,a.Z)({},s,p,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"Click Run Result and you will see three tabs:\nGraph, Run Output, and Config."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-0.png",src:t(1842).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"graph"},"Graph"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-1.png",src:t(275).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the graph, if you click on the run component, you can check the running information of the component."),(0,i.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,i.kt)("p",null,"The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components."),(0,i.kt)("h3",{id:"logs"},"Logs"),(0,i.kt)("p",null,"In the Logs tab, you can view all the stdout output generated during the execution of the Python code.\nHowever, pods are deleted after a certain period of time, so you may not be able to view them in this tab after a certain time.\nIn that case, you can check them in the main-logs section of the Output artifacts."),(0,i.kt)("h3",{id:"visualizations"},"Visualizations"),(0,i.kt)("p",null,"The Visualizations tab displays plots generated by the components."),(0,i.kt)("p",null,"To generate a plot, you can save the desired values as an argument using ",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")'),". 
The plot should be in HTML format.\nThe conversion process is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,i.kt)("p",null,"If written in pipeline, it will be like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,i.kt)("p",null,"If you run this script and check the resulting ",(0,i.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),", you will see the following."),(0,i.kt)("p",null,(0,i.kt)("details",null,(0,i.kt)("summary",null,"plot_pipeline.yaml"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = 
base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,i.kt)("p",null,"After running, click Visualization."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-5.png",src:t(9423).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-output"},"Run output"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-2.png",src:t(803).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Run output is where Kubeflow gathers the Artifacts generated in the specified form and shows the evaluation index (Metric)."),(0,i.kt)("p",null,"To show the evaluation index (Metric), you can save the name and value you want to show in the 
",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument in json format. For example, you can write it like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,i.kt)("p",null,"We will add a component to generate evaluation metrics to the pipeline created in the ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-pipeline"},"Pipeline")," and execute it. The whole pipeline is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,i.kt)("p",null,"After execution, click Run Output and it will show like this."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-4.png",src:t(5665).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"config"},"Config"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-3.png",src:t(896).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the Config tab, you can view all the values received as pipeline configurations."))}d.isMDXComponent=!0},1842:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},275:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},803:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},896:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},5665:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},9423:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file diff --git a/en/assets/js/607d38b2.e9895193.js b/en/assets/js/607d38b2.48c7fea4.js similarity index 99% rename from en/assets/js/607d38b2.e9895193.js rename to en/assets/js/607d38b2.48c7fea4.js index 6869554c..69c9fddd 100644 --- a/en/assets/js/607d38b2.e9895193.js +++ b/en/assets/js/607d38b2.48c7fea4.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9945],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function s(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var o=r.createContext({}),i=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},c=function(e){var t=i(e.components);return r.createElement(o.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,o=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=i(n),k=a,b=p["".concat(o,".").concat(k)]||p[k]||d[k]||l;return n?r.createElement(b,s(s({ref:t},c),{},{components:n})):r.createElement(b,s({ref:t},c))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,s=new Array(l);s[0]=k;var u={};for(var o in t)hasOwnProperty.call(t,o)&&(u[o]=t[o]);u.originalType=e,u[p]="string"==typeof e?e:a,s[1]=u;for(var i=2;i{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>s,default:()=>d,frontMatter:()=>l,metadata:()=>u,toc:()=>i});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},s=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"4.3. Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},o={},i=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. 
References",id:"6-references",level:2}],c={toc:i},p="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before building a Kubernetes cluster, install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," and install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Change the configuration of the network for Kubernetes."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{n.d(t,{Zo:()=>c,kt:()=>b});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function l(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function s(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var o=r.createContext({}),i=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},c=function(e){var t=i(e.components);return r.createElement(o.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,l=e.originalType,o=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=i(n),k=a,b=p["".concat(o,".").concat(k)]||p[k]||d[k]||l;return n?r.createElement(b,s(s({ref:t},c),{},{components:n})):r.createElement(b,s({ref:t},c))}));function b(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var l=n.length,s=new Array(l);s[0]=k;var u={};for(var o in t)hasOwnProperty.call(t,o)&&(u[o]=t[o]);u.originalType=e,u[p]="string"==typeof e?e:a,s[1]=u;for(var i=2;i{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>s,default:()=>d,frontMatter:()=>l,metadata:()=>u,toc:()=>i});var r=n(7462),a=(n(7294),n(3905));const l={title:"4.3. Kubeadm",description:"",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Youngcheol Jang"]},s=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",id:"setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",title:"4.3. Kubeadm",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"4.3. 
Kubeadm",description:"",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"4.1. K3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"},next:{title:"4.2. Minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"}},o={},i=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:i},p="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before building a Kubernetes cluster, install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," and install the necessary components to the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Change the configuration of the network for Kubernetes."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo modprobe br_netfilter\n\ncat <{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var o=r.createContext({}),u=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=u(e.components);return r.createElement(o.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,o=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=u(n),f=i,m=c["".concat(o,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var o in t)hasOwnProperty.call(t,o)&&(p[o]=t[o]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var u=2;u{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"kubeflow/basic-run",title:"7. 
Pipeline - Run",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/en/docs/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/kubeflow/advanced-component"}},o={},u=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Select Create Run",id:"1-select-create-run",level:3},{value:"2. Select Experiment",id:"2-select-experiment",level:3},{value:"3. Enter Pipeline Config",id:"3-enter-pipeline-config",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"Now we will run the uploaded pipeline."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiments in Kubeflow are units that logically manage runs executed within them."),(0,i.kt)("p",null,"When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"."),(0,i.kt)("p",null,"Experiments can be created via the Create Experiment button."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(6402).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-1.png",src:n(2813).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-select-create-run"},"1. Select Create Run"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(1567).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-select-experiment"},"2. Select Experiment"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(2090).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(141).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-enter-pipeline-config"},"3. Enter Pipeline Config"),(0,i.kt)("p",null,"Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for ",(0,i.kt)("inlineCode",{parentName:"p"},"number_1")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"number_2"),"."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(9208).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"Click the Start button after entering the values. 
The pipeline will start running."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(666).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"The executed pipelines can be viewed in the Runs tab.\nClicking on a run provides detailed information related to the executed pipeline."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(1552).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Upon clicking, the following screen appears. Components that have not yet executed are displayed in gray."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(7006).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"When a component has completed execution, it is marked with a green checkmark."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(1847).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"If we look at the last component, we can see that it has outputted the sum of the input values, which in this case is 8 (the sum of 3 and 5)."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(6944).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},6402:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},2813:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},141:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},1567:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},9208:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},666:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},1552:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},7006:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},1847:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},6944:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},2090:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6749],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var o=r.createContext({}),u=function(e){var t=r.useContext(o),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=u(e.components);return r.createElement(o.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,o=e.parentName,s=p(e,["components","mdxType","originalType","parentName"]),c=u(n),f=i,m=c["".concat(o,".").concat(f)]||c[f]||d[f]||a;return 
n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var p={};for(var o in t)hasOwnProperty.call(t,o)&&(p[o]=t[o]);p.originalType=e,p[c]="string"==typeof e?e:i,l[1]=p;for(var u=2;u{n.r(t),n.d(t,{assets:()=>o,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>p,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,p={unversionedId:"kubeflow/basic-run",id:"kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/en/docs/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/kubeflow/advanced-component"}},o={},u=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Select Create Run",id:"1-select-create-run",level:3},{value:"2. Select Experiment",id:"2-select-experiment",level:3},{value:"3. Enter Pipeline Config",id:"3-enter-pipeline-config",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"Now we will run the uploaded pipeline."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiments in Kubeflow are units that logically manage runs executed within them."),(0,i.kt)("p",null,"When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"."),(0,i.kt)("p",null,"Experiments can be created via the Create Experiment button."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(6402).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-1.png",src:n(2813).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-select-create-run"},"1. Select Create Run"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(1567).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-select-experiment"},"2. 
Select Experiment"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(2090).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(141).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-enter-pipeline-config"},"3. Enter Pipeline Config"),(0,i.kt)("p",null,"Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for ",(0,i.kt)("inlineCode",{parentName:"p"},"number_1")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"number_2"),"."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(9208).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"Click the Start button after entering the values. The pipeline will start running."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(666).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"The executed pipelines can be viewed in the Runs tab.\nClicking on a run provides detailed information related to the executed pipeline."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(1552).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Upon clicking, the following screen appears. Components that have not yet executed are displayed in gray."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(7006).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"When a component has completed execution, it is marked with a green checkmark."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(1847).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"If we look at the last component, we can see that it has outputted the sum of the input values, which in this case is 8 (the sum of 3 and 5)."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(6944).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},6402:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},2813:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},141:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},1567:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},9208:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},666:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},1552:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},7006:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},1847:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},6944:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},2090:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file diff --git a/en/assets/js/63323f2d.e0486a3a.js b/en/assets/js/63323f2d.3fe4cc4c.js similarity index 99% rename from en/assets/js/63323f2d.e0486a3a.js rename to en/assets/js/63323f2d.3fe4cc4c.js index 4fb1ff66..9ccbef7b 100644 --- a/en/assets/js/63323f2d.e0486a3a.js +++ b/en/assets/js/63323f2d.3fe4cc4c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5829],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>m});var a=t(7294);function i(e,n,t){return n in 
e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,l=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=i,m=u["".concat(s,".").concat(c)]||u[c]||h[c]||l;return t?a.createElement(m,r(r({ref:n},v),{},{components:t})):a.createElement(m,r({ref:n},v))}));function m(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var l=t.length,r=new Array(l);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:i,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const l={title:"1. Install Python virtual environment",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"appendix/pyenv",title:"1. Install Python virtual environment",description:"Python virtual environment",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/en/docs/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/pyenv.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Install Python virtual environment",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/en/docs/api-deployment/seldon-children"},next:{title:"2. 
Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/appendix/metallb"}},s={},p=[{value:"Python virtual environment",id:"python-virtual-environment",level:2},{value:"Installing pyenv",id:"installing-pyenv",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"Installation - macOS",id:"installation---macos",level:3},{value:"Installation - Ubuntu",id:"installation---ubuntu",level:3},{value:"Using pyenv",id:"using-pyenv",level:2},{value:"Install python version",id:"install-python-version",level:3},{value:"Create python virtual environment",id:"create-python-virtual-environment",level:3},{value:"Activating python virtual environment",id:"activating-python-virtual-environment",level:3},{value:"Deactivating python virtual environment",id:"deactivating-python-virtual-environment",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,i.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"python-virtual-environment"},"Python virtual environment"),(0,i.kt)("p",null,"When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects."),(0,i.kt)("p",null,"To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv."),(0,i.kt)("p",null,"Among these, ",(0,i.kt)("em",{parentName:"p"},"MLOps for ALL")," covers the installation of ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv")," and ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),".",(0,i.kt)("br",{parentName:"p"}),"\n","pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments."),(0,i.kt)("h2",{id:"installing-pyenv"},"Installing pyenv"),(0,i.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"Prerequisites vary depending on the operating system. 
Please refer to the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"following page")," and install the required packages accordingly."),(0,i.kt)("h3",{id:"installation---macos"},"Installation - macOS"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv, pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"For macOS, assuming the use of zsh since the default shell has changed to zsh in Catalina version and later, setting up pyenv."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h3",{id:"installation---ubuntu"},"Installation - Ubuntu"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv and pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,i.kt)("p",null,"If the following content is output, it means that the installation is successful."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 
0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\nSkip...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,i.kt)("p",null,"Enter the following string and save it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,i.kt)("p",null,"Restart the shell."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("p",null,"If the following message is displayed, it means that the settings have been configured correctly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee 
`pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h2",{id:"using-pyenv"},"Using pyenv"),(0,i.kt)("h3",{id:"install-python-version"},"Install python version"),(0,i.kt)("p",null,"Using the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv install ")," command, you can install the desired Python version.",(0,i.kt)("br",{parentName:"p"}),"\n","In this page, we will install the Python 3.7.12 version that is used by Kubeflow by default as an example."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,i.kt)("p",null,"If installed normally, the following message will be printed."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,i.kt)("h3",{id:"create-python-virtual-environment"},"Create python virtual environment"),(0,i.kt)("p",null,"Create a Python virtual environment with the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv ")," command to create a Python virtual environment with the desired Python version."),(0,i.kt)("p",null,"For example, let's create a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo")," with Python 3.7.12 version."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,i.kt)("h3",{id:"activating-python-virtual-environment"},"Activating python virtual environment"),(0,i.kt)("p",null,"Use the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv activate ")," command to use the virtual environment created in this way."),(0,i.kt)("p",null,"For example, we will use a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,i.kt)("p",null,"You can see that the information of the current virtual environment is printed at the front of the shell."),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,i.kt)("h3",{id:"deactivating-python-virtual-environment"},"Deactivating python virtual environment"),(0,i.kt)("p",null,"You can deactivate the currently active virtualenv by using the command ",(0,i.kt)("inlineCode",{parentName:"p"},"source deactivate"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5829],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>m});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,l=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=i,m=u["".concat(s,".").concat(c)]||u[c]||h[c]||l;return t?a.createElement(m,r(r({ref:n},v),{},{components:t})):a.createElement(m,r({ref:n},v))}));function m(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var l=t.length,r=new Array(l);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:i,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const l={title:"1. Install Python virtual environment",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"appendix/pyenv",title:"1. Install Python virtual environment",description:"Python virtual environment",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/en/docs/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/appendix/pyenv.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Install Python virtual environment",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/en/docs/api-deployment/seldon-children"},next:{title:"2. 
Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/appendix/metallb"}},s={},p=[{value:"Python virtual environment",id:"python-virtual-environment",level:2},{value:"Installing pyenv",id:"installing-pyenv",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"Installation - macOS",id:"installation---macos",level:3},{value:"Installation - Ubuntu",id:"installation---ubuntu",level:3},{value:"Using pyenv",id:"using-pyenv",level:2},{value:"Install python version",id:"install-python-version",level:3},{value:"Create python virtual environment",id:"create-python-virtual-environment",level:3},{value:"Activating python virtual environment",id:"activating-python-virtual-environment",level:3},{value:"Deactivating python virtual environment",id:"deactivating-python-virtual-environment",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,i.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"python-virtual-environment"},"Python virtual environment"),(0,i.kt)("p",null,"When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects."),(0,i.kt)("p",null,"To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv."),(0,i.kt)("p",null,"Among these, ",(0,i.kt)("em",{parentName:"p"},"MLOps for ALL")," covers the installation of ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv")," and ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),".",(0,i.kt)("br",{parentName:"p"}),"\n","pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments."),(0,i.kt)("h2",{id:"installing-pyenv"},"Installing pyenv"),(0,i.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"Prerequisites vary depending on the operating system. 
Please refer to the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"following page")," and install the required packages accordingly."),(0,i.kt)("h3",{id:"installation---macos"},"Installation - macOS"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv, pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"For macOS, assuming the use of zsh since the default shell has changed to zsh in Catalina version and later, setting up pyenv."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h3",{id:"installation---ubuntu"},"Installation - Ubuntu"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv and pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,i.kt)("p",null,"If the following content is output, it means that the installation is successful."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 
0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\nSkip...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,i.kt)("p",null,"Enter the following string and save it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,i.kt)("p",null,"Restart the shell."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("p",null,"If the following message is displayed, it means that the settings have been configured correctly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee 
`pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h2",{id:"using-pyenv"},"Using pyenv"),(0,i.kt)("h3",{id:"install-python-version"},"Install python version"),(0,i.kt)("p",null,"Using the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv install ")," command, you can install the desired Python version.",(0,i.kt)("br",{parentName:"p"}),"\n","In this page, we will install the Python 3.7.12 version that is used by Kubeflow by default as an example."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,i.kt)("p",null,"If installed normally, the following message will be printed."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,i.kt)("h3",{id:"create-python-virtual-environment"},"Create python virtual environment"),(0,i.kt)("p",null,"Create a Python virtual environment with the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv ")," command to create a Python virtual environment with the desired Python version."),(0,i.kt)("p",null,"For example, let's create a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo")," with Python 3.7.12 version."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,i.kt)("h3",{id:"activating-python-virtual-environment"},"Activating python virtual environment"),(0,i.kt)("p",null,"Use the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv activate ")," command to use the virtual environment created in this way."),(0,i.kt)("p",null,"For example, we will use a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,i.kt)("p",null,"You can see that the information of the current virtual environment is printed at the front of the shell."),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,i.kt)("h3",{id:"deactivating-python-virtual-environment"},"Deactivating python virtual environment"),(0,i.kt)("p",null,"You can deactivate the currently active virtualenv by using the command ",(0,i.kt)("inlineCode",{parentName:"p"},"source deactivate"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/64101c1c.6e4f5a73.js b/en/assets/js/64101c1c.9f684394.js similarity index 99% rename from en/assets/js/64101c1c.6e4f5a73.js rename to en/assets/js/64101c1c.9f684394.js index 904c7e46..c469a50e 100644 --- a/en/assets/js/64101c1c.6e4f5a73.js +++ b/en/assets/js/64101c1c.9f684394.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4826],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},k=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),k=a,m=c["".concat(s,".").concat(k)]||c[k]||d[k]||r;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,i=new Array(r);i[0]=k;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const r={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"version-1.0/kubeflow-dashboard-guide/notebooks",title:"2. 
Notebooks",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"Launch Notebook Server",id:"launch-notebook-server",level:2},{value:"Accessing the Notebook Server",id:"accessing-the-notebook-server",level:2},{value:"Stopping the Notebook Server",id:"stopping-the-notebook-server",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(c,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"launch-notebook-server"},"Launch Notebook Server"),(0,a.kt)("p",null,"Click on the Notebooks tab on the left side of the Central Dashboard."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:n(7173).Z,width:"3940",height:"1278"})),(0,a.kt)("p",null,"You will see a similar screen."),(0,a.kt)("p",null,"The Notebooks tab is a page where users can independently create and access jupyter notebook and code server environments (hereinafter referred to as a notebook server)."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-home",src:n(8419).Z,width:"5008",height:"2682"})),(0,a.kt)("p",null,'Click the "+ NEW NOTEBOOK" button at the top right. '),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"new-notebook",src:n(929).Z,width:"1900",height:"312"})),(0,a.kt)("p",null,"When the screen shown below appears, now specify the spec (Spec) of the notebook server to be created."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create",src:n(1049).Z,width:"1738",height:"1674"})),(0,a.kt)("details",null,(0,a.kt)("summary",null,"For details for spec:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"name"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies a name to identify the notebook server."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"namespace"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Cannot be changed. 
(It is automatically set to the namespace of the currently logged-in user account.)"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Image"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use an image that utilizes GPU within the notebook server, refer to the ",(0,a.kt)("strong",{parentName:"li"},"GPUs")," section below."))),(0,a.kt)("li",{parentName:"ul"},"If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"CPU / RAM"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of resources required.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"cpu: in core units",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Represents the number of virtual cores, and can also be specified as a float value such as ",(0,a.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,a.kt)("inlineCode",{parentName:"li"},"2.7"),", etc."))),(0,a.kt)("li",{parentName:"ul"},"memory: in Gi units"))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"GPUs"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the number of GPUs to allocate to the Jupyter notebook.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"None"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"When GPU resources are not required."))),(0,a.kt)("li",{parentName:"ul"},"1, 2, 4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Allocates 1, 2, or 4 GPUs."))))),(0,a.kt)("li",{parentName:"ul"},"GPU Vendor:",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you have followed the ",(0,a.kt)("a",{parentName:"li",href:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," guide and installed the NVIDIA GPU plugin, select NVIDIA."))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Workspace Volume"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of disk space required within the notebook server."),(0,a.kt)("li",{parentName:"ul"},"Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Check the ",(0,a.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," checkbox only if it is not necessary to save the notebook server's work. 
",(0,a.kt)("strong",{parentName:"li"},"It is generally recommended not to check this option.")),(0,a.kt)("li",{parentName:"ul"},'If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.'))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Data Volumes"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If additional storage resources are required, click the ",(0,a.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," button to create them."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"These are generally not needed, so detailed explanations are omitted in ",(0,a.kt)("em",{parentName:"li"},"MLOps for All"),"."))))),(0,a.kt)("p",null,"If you followed the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"Setup GPU (Optional)"),", select NVIDIA if you have installed the nvidia gpu plugin."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"creating",src:n(2733).Z,width:"1928",height:"400"})),(0,a.kt)("p",null,"After creation, the ",(0,a.kt)("strong",{parentName:"p"},"Status")," will change to a green check mark icon, and the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will be activated.\n",(0,a.kt)("img",{alt:"created",src:n(2374).Z,width:"1852",height:"352"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"accessing-the-notebook-server"},"Accessing the Notebook Server"),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will open a new browser window, where you will see the following screen:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-access",src:n(7534).Z,width:"2898",height:"1990"})),(0,a.kt)("p",null,"You can use the Notebook, Console, and Terminal icons in the ",(0,a.kt)("strong",{parentName:"p"},"Launcher")," to start using them."),(0,a.kt)("p",null," Notebook Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-console",src:n(654).Z,width:"2850",height:"736"})),(0,a.kt)("p",null," Terminal Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"terminal-console",src:n(3710).Z,width:"2834",height:"806"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"stopping-the-notebook-server"},"Stopping the Notebook Server"),(0,a.kt)("p",null,"If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. ",(0,a.kt)("strong",{parentName:"p"},"Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server."),(0,a.kt)("br",{parentName:"p"}),"\n","If you haven't changed the path during notebook server creation, the default Workspace Volume path is ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," inside the notebook server, so any data stored outside the ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," directory will be deleted."),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("inlineCode",{parentName:"p"},"STOP")," button as shown below will stop the notebook server:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-stop",src:n(3192).Z,width:"1832",height:"1014"})),(0,a.kt)("p",null,"Once the server is stopped, the ",(0,a.kt)("inlineCode",{parentName:"p"},"CONNECT")," button will be disabled. 
To restart the notebook server and use it again, click the ",(0,a.kt)("inlineCode",{parentName:"p"},"PLAY")," button."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-restart",src:n(2039).Z,width:"1888",height:"932"})))}d.isMDXComponent=!0},1049:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},2374:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},2733:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},7173:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},929:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},7534:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},654:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},8419:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},2039:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},3192:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},3710:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4826],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},k=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),k=a,m=c["".concat(s,".").concat(k)]||c[k]||d[k]||r;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,i=new Array(r);i[0]=k;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const r={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"version-1.0/kubeflow-dashboard-guide/notebooks",title:"2. 
Notebooks",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"Launch Notebook Server",id:"launch-notebook-server",level:2},{value:"Accessing the Notebook Server",id:"accessing-the-notebook-server",level:2},{value:"Stopping the Notebook Server",id:"stopping-the-notebook-server",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(c,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"launch-notebook-server"},"Launch Notebook Server"),(0,a.kt)("p",null,"Click on the Notebooks tab on the left side of the Central Dashboard."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:n(7173).Z,width:"3940",height:"1278"})),(0,a.kt)("p",null,"You will see a similar screen."),(0,a.kt)("p",null,"The Notebooks tab is a page where users can independently create and access jupyter notebook and code server environments (hereinafter referred to as a notebook server)."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-home",src:n(8419).Z,width:"5008",height:"2682"})),(0,a.kt)("p",null,'Click the "+ NEW NOTEBOOK" button at the top right. '),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"new-notebook",src:n(929).Z,width:"1900",height:"312"})),(0,a.kt)("p",null,"When the screen shown below appears, now specify the spec (Spec) of the notebook server to be created."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create",src:n(1049).Z,width:"1738",height:"1674"})),(0,a.kt)("details",null,(0,a.kt)("summary",null,"For details for spec:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"name"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies a name to identify the notebook server."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"namespace"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Cannot be changed. 
(It is automatically set to the namespace of the currently logged-in user account.)"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Image"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use an image that utilizes GPU within the notebook server, refer to the ",(0,a.kt)("strong",{parentName:"li"},"GPUs")," section below."))),(0,a.kt)("li",{parentName:"ul"},"If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"CPU / RAM"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of resources required.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"cpu: in core units",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Represents the number of virtual cores, and can also be specified as a float value such as ",(0,a.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,a.kt)("inlineCode",{parentName:"li"},"2.7"),", etc."))),(0,a.kt)("li",{parentName:"ul"},"memory: in Gi units"))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"GPUs"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the number of GPUs to allocate to the Jupyter notebook.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"None"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"When GPU resources are not required."))),(0,a.kt)("li",{parentName:"ul"},"1, 2, 4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Allocates 1, 2, or 4 GPUs."))))),(0,a.kt)("li",{parentName:"ul"},"GPU Vendor:",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you have followed the ",(0,a.kt)("a",{parentName:"li",href:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," guide and installed the NVIDIA GPU plugin, select NVIDIA."))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Workspace Volume"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of disk space required within the notebook server."),(0,a.kt)("li",{parentName:"ul"},"Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Check the ",(0,a.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," checkbox only if it is not necessary to save the notebook server's work. 
",(0,a.kt)("strong",{parentName:"li"},"It is generally recommended not to check this option.")),(0,a.kt)("li",{parentName:"ul"},'If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.'))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Data Volumes"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If additional storage resources are required, click the ",(0,a.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," button to create them."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"These are generally not needed, so detailed explanations are omitted in ",(0,a.kt)("em",{parentName:"li"},"MLOps for All"),"."))))),(0,a.kt)("p",null,"If you followed the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},"Setup GPU (Optional)"),", select NVIDIA if you have installed the nvidia gpu plugin."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"creating",src:n(2733).Z,width:"1928",height:"400"})),(0,a.kt)("p",null,"After creation, the ",(0,a.kt)("strong",{parentName:"p"},"Status")," will change to a green check mark icon, and the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will be activated.\n",(0,a.kt)("img",{alt:"created",src:n(2374).Z,width:"1852",height:"352"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"accessing-the-notebook-server"},"Accessing the Notebook Server"),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will open a new browser window, where you will see the following screen:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-access",src:n(7534).Z,width:"2898",height:"1990"})),(0,a.kt)("p",null,"You can use the Notebook, Console, and Terminal icons in the ",(0,a.kt)("strong",{parentName:"p"},"Launcher")," to start using them."),(0,a.kt)("p",null," Notebook Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-console",src:n(654).Z,width:"2850",height:"736"})),(0,a.kt)("p",null," Terminal Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"terminal-console",src:n(3710).Z,width:"2834",height:"806"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"stopping-the-notebook-server"},"Stopping the Notebook Server"),(0,a.kt)("p",null,"If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. ",(0,a.kt)("strong",{parentName:"p"},"Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server."),(0,a.kt)("br",{parentName:"p"}),"\n","If you haven't changed the path during notebook server creation, the default Workspace Volume path is ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," inside the notebook server, so any data stored outside the ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," directory will be deleted."),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("inlineCode",{parentName:"p"},"STOP")," button as shown below will stop the notebook server:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-stop",src:n(3192).Z,width:"1832",height:"1014"})),(0,a.kt)("p",null,"Once the server is stopped, the ",(0,a.kt)("inlineCode",{parentName:"p"},"CONNECT")," button will be disabled. 
To restart the notebook server and use it again, click the ",(0,a.kt)("inlineCode",{parentName:"p"},"PLAY")," button."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-restart",src:n(2039).Z,width:"1888",height:"932"})))}d.isMDXComponent=!0},1049:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},2374:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},2733:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},7173:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},929:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},7534:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},654:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},8419:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},2039:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},3192:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},3710:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file diff --git a/en/assets/js/64f10cae.54e3ef79.js b/en/assets/js/64f10cae.2cc4f7ee.js similarity index 99% rename from en/assets/js/64f10cae.54e3ef79.js rename to en/assets/js/64f10cae.2cc4f7ee.js index 7d4182ac..b236ad2e 100644 --- a/en/assets/js/64f10cae.54e3ef79.js +++ b/en/assets/js/64f10cae.2cc4f7ee.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5424],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=i.createContext({}),d=function(e){var t=i.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},c=function(e){var t=d(e.components);return i.createElement(l.Provider,{value:t},e.children)},p="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},u=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=d(n),u=o,m=p["".concat(l,".").concat(u)]||p[u]||h[u]||a;return n?i.createElement(m,r(r({ref:t},c),{},{components:n})):i.createElement(m,r({ref:t},c))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,r=new Array(a);r[0]=u;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>a,metadata:()=>s,toc:()=>d});var i=n(7462),o=(n(7294),n(3905));const a={title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/levels",id:"version-1.0/introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/en/docs/1.0/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/levels.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/en/docs/1.0/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/en/docs/1.0/introduction/component"}},l={},d=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"Level 0: Manual Process",id:"level-0-manual-process",level:2},{value:"Level 1: Automated ML Pipeline",id:"level-1-automated-ml-pipeline",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"Level 2: Automating the CI/CD Pipeline",id:"level-2-automating-the-cicd-pipeline",level:2}],c={toc:d},p="wrapper";function h(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,i.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are."),(0,o.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,o.kt)("p",null,"Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google. "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"paper",src:n(6061).Z,width:"840",height:"638"})),(0,o.kt)("p",null,"The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning."),(0,o.kt)("p",null,"Google developed MLOps by evolving this paper and expanding the term. More details can be found on the ",(0,o.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"Google Cloud homepage"),". In this post, we will try to explain what Google means by MLOps."),(0,o.kt)("p",null,"Google divided the evolution of MLOps into three (0-2) stages. Before explaining each stage, let's review some of the concepts described in the previous post."),(0,o.kt)("p",null,"In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. 
We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD."),(0,o.kt)("h2",{id:"level-0-manual-process"},"Level 0: Manual Process"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-0",src:n(8982).Z,width:"1332",height:"494"})),(0,o.kt)("p",null,'At the 0th stage, two teams communicate through a "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operation team. The operation team then deploys the model delivered in this way.'),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"toon",src:n(9386).Z,width:"1282",height:"1746"})),(0,o.kt)("p",null,'Initial machine learning models are deployed through this "model" centered communication. However, there are several problems with this distribution method. For example, if some functions use Python 3.7 and some use Python 3.8, we often see the following situation.'),(0,o.kt)("p",null,"The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Python code"),(0,o.kt)("li",{parentName:"ol"},"Trained weights"),(0,o.kt)("li",{parentName:"ol"},"Environment (Packages, versions)")),(0,o.kt)("p",null,"If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used."),(0,o.kt)("p",null,"In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed features increases and communication becomes more challenging, it becomes difficult to deploy models with better performance quickly."),(0,o.kt)("h2",{id:"level-1-automated-ml-pipeline"},"Level 1: Automated ML Pipeline"),(0,o.kt)("h3",{id:"pipeline"},"Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-pipeline",src:n(8463).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,'So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn\'t work due to differences in the environment.'),(0,o.kt)("p",null,'However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.'),(0,o.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-ct.png",src:n(2993).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,"And the concept of Continuous Training (CT) is added. So why is CT necessary?"),(0,o.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,o.kt)("p",null,'In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. 
The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.'),(0,o.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,o.kt)("p",null,"However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot."),(0,o.kt)("p",null,"For example, in an automotive production line, a model A was created and used for predictions. If an entirely different model B is introduced, it represents unseen data patterns, and a new model is trained for model B."),(0,o.kt)("p",null,'Now, the model will make predictions for model B. However, if the data switches back to model A, what should be done?\nIf there are only retraining rules, a new model for model A will be trained again. However, machine learning models require a sufficient amount of data to demonstrate satisfactory performance. The term "blind spot" refers to a period in which the model does not work while gathering enough data.'),(0,o.kt)("p",null,"There is a simple solution to address this blind spot. It involves checking whether there was a previous model for model A and, if so, using the previous model for prediction instead of immediately training a new model. This way, using meta-data associated with the model to automatically switch models is known as Auto Deploy."),(0,o.kt)("p",null,"To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously."),(0,o.kt)("h2",{id:"level-2-automating-the-cicd-pipeline"},"Level 2: Automating the CI/CD Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-2",src:n(601).Z,width:"1356",height:"862"})),(0,o.kt)("p",null,"The title of Step 2 is the automation of CI and CD. In DevOps, the focus of CI/CD is on source code. So what is the focus of CI/CD in MLOps?"),(0,o.kt)("p",null,"In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline."),(0,o.kt)("p",null,"Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training."),(0,o.kt)("p",null,"In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly."),(0,o.kt)("p",null,"In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. 
This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model."))}h.isMDXComponent=!0},8982:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},2993:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},8463:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},601:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},6061:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},9386:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5424],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=i.createContext({}),d=function(e){var t=i.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},c=function(e){var t=d(e.components);return i.createElement(l.Provider,{value:t},e.children)},p="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},u=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=d(n),u=o,m=p["".concat(l,".").concat(u)]||p[u]||h[u]||a;return n?i.createElement(m,r(r({ref:t},c),{},{components:n})):i.createElement(m,r({ref:t},c))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,r=new Array(a);r[0]=u;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>a,metadata:()=>s,toc:()=>d});var i=n(7462),o=(n(7294),n(3905));const a={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/levels",id:"version-1.0/introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/en/docs/1.0/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/levels.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/en/docs/1.0/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/en/docs/1.0/introduction/component"}},l={},d=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"Level 0: Manual Process",id:"level-0-manual-process",level:2},{value:"Level 1: Automated ML Pipeline",id:"level-1-automated-ml-pipeline",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"Level 2: Automating the CI/CD Pipeline",id:"level-2-automating-the-cicd-pipeline",level:2}],c={toc:d},p="wrapper";function h(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,i.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are."),(0,o.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,o.kt)("p",null,"Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google. "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"paper",src:n(6061).Z,width:"840",height:"638"})),(0,o.kt)("p",null,"The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning."),(0,o.kt)("p",null,"Google developed MLOps by evolving this paper and expanding the term. More details can be found on the ",(0,o.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"Google Cloud homepage"),". In this post, we will try to explain what Google means by MLOps."),(0,o.kt)("p",null,"Google divided the evolution of MLOps into three (0-2) stages. Before explaining each stage, let's review some of the concepts described in the previous post."),(0,o.kt)("p",null,"In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD."),(0,o.kt)("h2",{id:"level-0-manual-process"},"Level 0: Manual Process"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-0",src:n(8982).Z,width:"1332",height:"494"})),(0,o.kt)("p",null,'At the 0th stage, two teams communicate through a "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operation team. The operation team then deploys the model delivered in this way.'),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"toon",src:n(9386).Z,width:"1282",height:"1746"})),(0,o.kt)("p",null,'Initial machine learning models are deployed through this "model" centered communication. However, there are several problems with this distribution method. 
For example, if some functions use Python 3.7 and some use Python 3.8, we often see the following situation.'),(0,o.kt)("p",null,"The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Python code"),(0,o.kt)("li",{parentName:"ol"},"Trained weights"),(0,o.kt)("li",{parentName:"ol"},"Environment (Packages, versions)")),(0,o.kt)("p",null,"If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used."),(0,o.kt)("p",null,"In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed features increases and communication becomes more challenging, it becomes difficult to deploy models with better performance quickly."),(0,o.kt)("h2",{id:"level-1-automated-ml-pipeline"},"Level 1: Automated ML Pipeline"),(0,o.kt)("h3",{id:"pipeline"},"Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-pipeline",src:n(8463).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,'So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn\'t work due to differences in the environment.'),(0,o.kt)("p",null,'However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.'),(0,o.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-ct.png",src:n(2993).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,"And the concept of Continuous Training (CT) is added. So why is CT necessary?"),(0,o.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,o.kt)("p",null,'In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.'),(0,o.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,o.kt)("p",null,"However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot."),(0,o.kt)("p",null,"For example, in an automotive production line, a model A was created and used for predictions. If an entirely different model B is introduced, it represents unseen data patterns, and a new model is trained for model B."),(0,o.kt)("p",null,'Now, the model will make predictions for model B. 
However, if the data switches back to model A, what should be done?\nIf there are only retraining rules, a new model for model A will be trained again. However, machine learning models require a sufficient amount of data to demonstrate satisfactory performance. The term "blind spot" refers to a period in which the model does not work while gathering enough data.'),(0,o.kt)("p",null,"There is a simple solution to address this blind spot. It involves checking whether there was a previous model for model A and, if so, using the previous model for prediction instead of immediately training a new model. This way, using meta-data associated with the model to automatically switch models is known as Auto Deploy."),(0,o.kt)("p",null,"To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously."),(0,o.kt)("h2",{id:"level-2-automating-the-cicd-pipeline"},"Level 2: Automating the CI/CD Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-2",src:n(601).Z,width:"1356",height:"862"})),(0,o.kt)("p",null,"The title of Step 2 is the automation of CI and CD. In DevOps, the focus of CI/CD is on source code. So what is the focus of CI/CD in MLOps?"),(0,o.kt)("p",null,"In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline."),(0,o.kt)("p",null,"Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training."),(0,o.kt)("p",null,"In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly."),(0,o.kt)("p",null,"In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. 
This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model."))}h.isMDXComponent=!0},8982:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},2993:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},8463:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},601:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},6061:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},9386:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file diff --git a/en/assets/js/656f3db8.d9ffe346.js b/en/assets/js/656f3db8.5871db88.js similarity index 98% rename from en/assets/js/656f3db8.d9ffe346.js rename to en/assets/js/656f3db8.5871db88.js index 4be92539..387a8c42 100644 --- a/en/assets/js/656f3db8.d9ffe346.js +++ b/en/assets/js/656f3db8.5871db88.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6210],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),b=a,m=c["".concat(s,".").concat(b)]||c[b]||d[b]||o;return r?n.createElement(m,i(i({ref:t},p),{},{components:r})):n.createElement(m,i({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=b;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"kubeflow-dashboard-guide/intro",title:"1. 
Central Dashboard",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/en/docs/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/en/docs/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Once you have completed ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-kf"},"Kubeflow installation"),", you can access the dashboard through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(5794).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"The Central Dashboard is a UI that integrates all the features provided by Kubeflow. The features provided by the Central Dashboard can be divided based on the tabs on the left side"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"Let's now look at the simple usage of each feature."))}d.isMDXComponent=!0},5794:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6210],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return 
n.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),c=u(r),b=a,m=c["".concat(s,".").concat(b)]||c[b]||d[b]||o;return r?n.createElement(m,i(i({ref:t},p),{},{components:r})):n.createElement(m,i({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=b;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"kubeflow-dashboard-guide/intro",title:"1. Central Dashboard",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/en/docs/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/en/docs/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...o}=e;return(0,a.kt)(c,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Once you have completed ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-kf"},"Kubeflow installation"),", you can access the dashboard through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(5794).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"The Central Dashboard is a UI that integrates all the features provided by Kubeflow. 
The features provided by the Central Dashboard can be divided based on the tabs on the left side"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"Let's now look at the simple usage of each feature."))}d.isMDXComponent=!0},5794:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/6a39bdb7.454b7333.js b/en/assets/js/6a39bdb7.eb7e91f0.js similarity index 97% rename from en/assets/js/6a39bdb7.454b7333.js rename to en/assets/js/6a39bdb7.eb7e91f0.js index 0bd31c82..f8246ebc 100644 --- a/en/assets/js/6a39bdb7.454b7333.js +++ b/en/assets/js/6a39bdb7.eb7e91f0.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2032],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>b});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function a(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),d=r,b=c["".concat(s,".").concat(d)]||c[d]||f[d]||i;return n?o.createElement(b,a(a({ref:t},u),{},{components:n})):o.createElement(b,a({ref:t},u))}));function b(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=n.length,a=new Array(i);a[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,a[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var o=n(7462),r=(n(7294),n(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},a=void 0,l={unversionedId:"kubeflow/kubeflow-intro",id:"version-1.0/kubeflow/kubeflow-intro",title:"1. 
Kubeflow Introduction",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts"}},s={},p=[],u={toc:p},c="wrapper";function f(e){let{components:t,...n}=e;return(0,r.kt)(c,(0,o.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"To use Kubeflow, you need to write components and pipelines."),(0,r.kt)("p",null,"The approach described in ",(0,r.kt)("em",{parentName:"p"},"MLOps for ALL")," differs slightly from the method described on the ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline official website"),". Here, Kubeflow Pipeline is used as one of the components in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"elements that make up MLOps")," rather than a standalone workflow."),(0,r.kt)("p",null,"Now, let's understand what components and pipelines are and how to write them."))}f.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2032],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>b});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function a(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},d=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),d=r,b=c["".concat(s,".").concat(d)]||c[d]||f[d]||i;return n?o.createElement(b,a(a({ref:t},u),{},{components:n})):o.createElement(b,a({ref:t},u))}));function b(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var i=n.length,a=new Array(i);a[0]=d;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:r,a[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var o=n(7462),r=(n(7294),n(3905));const i={title:"1. 
Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},a=void 0,l={unversionedId:"kubeflow/kubeflow-intro",id:"version-1.0/kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts"}},s={},p=[],u={toc:p},c="wrapper";function f(e){let{components:t,...n}=e;return(0,r.kt)(c,(0,o.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"To use Kubeflow, you need to write components and pipelines."),(0,r.kt)("p",null,"The approach described in ",(0,r.kt)("em",{parentName:"p"},"MLOps for ALL")," differs slightly from the method described on the ",(0,r.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline official website"),". Here, Kubeflow Pipeline is used as one of the components in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"elements that make up MLOps")," rather than a standalone workflow."),(0,r.kt)("p",null,"Now, let's understand what components and pipelines are and how to write them."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/6d1a6fc6.2bbcd495.js b/en/assets/js/6d1a6fc6.f58f790b.js similarity index 99% rename from en/assets/js/6d1a6fc6.2bbcd495.js rename to en/assets/js/6d1a6fc6.f58f790b.js index 9cc3fb9f..5e772766 100644 --- a/en/assets/js/6d1a6fc6.2bbcd495.js +++ b/en/assets/js/6d1a6fc6.f58f790b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7053],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),s=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=s(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,l=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(t),_=r,u=m["".concat(l,".").concat(_)]||m[_]||c[_]||i;return 
t?a.createElement(u,o(o({ref:n},d),{},{components:t})):a.createElement(u,o({ref:n},d))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=_;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>p,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const i={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"kubeflow/advanced-environment",id:"version-1.0/kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-environment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/1.0/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline"}},l={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Adding packages",id:"adding-packages",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],d={toc:s},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"When we run the pipeline written in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),", it fails. Let's find out why it fails and modify it so that it can run properly. 
"),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Let's convert the component written ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component#convert-to-kubeflow-format"},"earlier")," into a yaml file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you run the script above, you will get a ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file like the one below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"According to the content explained in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component")," previously mentioned, this component will be executed as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"However, when running the component created above, an error will occur.",(0,r.kt)("br",{parentName:"p"}),"\n","The reason is in the way the component wrapper is executed.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow uses Kubernetes, so the 
component wrapper runs the component content on its own separate container."),(0,r.kt)("p",null,"In detail, the image specified in the generated ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7"),"."),(0,r.kt)("p",null,"There may be some people who notice why it is not running for some reason."),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," image does not have the packages we want to use, such as ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", and ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn"),", installed.",(0,r.kt)("br",{parentName:"p"}),"\n","Therefore, when executing, it fails with an error indicating that the packages are not found."),(0,r.kt)("p",null,"So, how can we add the packages?"),(0,r.kt)("h2",{id:"adding-packages"},"Adding packages"),(0,r.kt)("p",null,"During the process of converting Kubeflow, there are two ways to add packages:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"base_image")),(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install"))),(0,r.kt)("p",null,"Let's check what arguments the function ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," used to compile the components can receive."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": Function that creates the component wrapper to be made into a component."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": Image that the component wrapper will run on."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": Additional packages that need to be installed for the component to use.")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"Take a closer look at the sequence in which the component is executed and it will be as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"If the base_image used by the component already has all the packages installed, you can use it without installing additional packages."),(0,r.kt)("p",null,"For example, on this page we are going to write a Dockerfile like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"Let's build the image using the Dockerfile above. The Docker hub we will use for the practice is ghcr.",(0,r.kt)("br",{parentName:"p"}),"\n","You can choose a Docker hub according to your environment and upload it."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"Now let's try inputting the base image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you compile the generated component, it will appear as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"We can confirm that the base_image has been changed to the value we have set."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"However, when packages are added, it takes a lot of time to create a new Docker image.\nIn this case, we can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument to easily add packages to the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you execute the script, the ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"If we take a closer look at the order in which the components written above 
are executed, it looks like this:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"When the generated yaml file is closely examined, the following lines are automatically added, so that the necessary packages are installed and the program runs smoothly without errors."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7053],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),s=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=s(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,l=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(t),_=r,u=m["".concat(l,".").concat(_)]||m[_]||c[_]||i;return t?a.createElement(u,o(o({ref:n},d),{},{components:t})):a.createElement(u,o({ref:n},d))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=_;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>p,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const i={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"kubeflow/advanced-environment",id:"version-1.0/kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-environment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:9,frontMatter:{title:"9. 
Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/1.0/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline"}},l={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Adding packages",id:"adding-packages",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],d={toc:s},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"When we run the pipeline written in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),", it fails. Let's find out why it fails and modify it so that it can run properly. "),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Let's convert the component written ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component#convert-to-kubeflow-format"},"earlier")," into a yaml file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you run the script above, you will get a ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file like the one below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n 
_parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"According to the content explained in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component")," previously mentioned, this component will be executed as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"However, when running the component created above, an error will occur.",(0,r.kt)("br",{parentName:"p"}),"\n","The reason is in the way the component wrapper is executed.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow uses Kubernetes, so the component wrapper runs the component content on its own separate container."),(0,r.kt)("p",null,"In detail, the image specified in the generated ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7"),"."),(0,r.kt)("p",null,"There may be some people who notice why it is not running for some reason."),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," image does not have the packages we want to use, such as ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", and ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn"),", installed.",(0,r.kt)("br",{parentName:"p"}),"\n","Therefore, when executing, it fails with an error indicating that the packages are not found."),(0,r.kt)("p",null,"So, how can we add the packages?"),(0,r.kt)("h2",{id:"adding-packages"},"Adding packages"),(0,r.kt)("p",null,"During the process of converting Kubeflow, there are two ways to add packages:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"base_image")),(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install"))),(0,r.kt)("p",null,"Let's check what arguments the function ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," used to compile the components can receive."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": Function that creates the component wrapper to be made into a component."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": Image that the component wrapper will run on."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": Additional packages that need to be installed for the component to use.")),(0,r.kt)("h3",{id:"1-base_image"},"1. 
base_image"),(0,r.kt)("p",null,"Take a closer look at the sequence in which the component is executed and it will be as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"If the base_image used by the component already has all the packages installed, you can use it without installing additional packages."),(0,r.kt)("p",null,"For example, on this page we are going to write a Dockerfile like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"Let's build the image using the Dockerfile above. The Docker hub we will use for the practice is ghcr.",(0,r.kt)("br",{parentName:"p"}),"\n","You can choose a Docker hub according to your environment and upload it."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"Now let's try inputting the base image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you compile the generated component, it will appear as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", 
dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"We can confirm that the base_image has been changed to the value we have set."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. packages_to_install"),(0,r.kt)("p",null,"However, when packages are added, it takes a lot of time to create a new Docker image.\nIn this case, we can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument to easily add packages to the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you execute the script, the ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, 
default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"If we take a closer look at the order in which the components written above are executed, it looks like this:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"When the generated yaml file is closely examined, the following lines are automatically added, so that the necessary packages are installed and the program runs smoothly without errors."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/74126281.013b4470.js b/en/assets/js/74126281.f0fa827c.js similarity index 97% rename from en/assets/js/74126281.013b4470.js rename to en/assets/js/74126281.f0fa827c.js index aeef1b68..669ccd36 100644 --- a/en/assets/js/74126281.013b4470.js +++ b/en/assets/js/74126281.f0fa827c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4586],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},p=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=c(r),k=a,m=u["".concat(s,".").concat(k)]||u[k]||d[k]||o;return r?n.createElement(m,l(l({ref:t},p),{},{components:r})):n.createElement(m,l({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,l=new 
Array(o);l[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:a,l[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const o={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/install",id:"prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/en/docs/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/install.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/prerequisites/docker/introduction"}},s={},c=[{value:"Docker",id:"docker",level:2},{value:"Check Installation",id:"check-installation",level:2},{value:"Before diving in..",id:"before-diving-in",level:2}],p={toc:c},u="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"To practice Docker, you need to install Docker.",(0,a.kt)("br",{parentName:"p"}),"\n","The Docker installation varies depending on which OS you are using.",(0,a.kt)("br",{parentName:"p"}),"\n","Please refer to the official website for the Docker installation that fits your environment: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"check-installation"},"Check Installation"),(0,a.kt)("p",null,"Check installation requires an OS, terminal environment where ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," runs correctly."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker 
Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"before-diving-in"},"Before diving in.."),(0,a.kt)("p",null,"It is possible that many metaphors and examples will be focused towards MLOps as they explain the necessary Docker usage to use MLOps."))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4586],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function l(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),c=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):l(l({},t),e)),r},p=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},k=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),u=c(r),k=a,m=u["".concat(s,".").concat(k)]||u[k]||d[k]||o;return r?n.createElement(m,l(l({ref:t},p),{},{components:r})):n.createElement(m,l({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,l=new Array(o);l[0]=k;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[u]="string"==typeof e?e:a,l[1]=i;for(var c=2;c{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var n=r(7462),a=(r(7294),r(3905));const o={title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/install",id:"prerequisites/docker/install",title:"Install Docker",description:"Install docker to start.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/install.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/install",permalink:"/en/docs/prerequisites/docker/install",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/install.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"Install Docker",description:"Install docker to start.",sidebar_position:1,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",next:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/prerequisites/docker/introduction"}},s={},c=[{value:"Docker",id:"docker",level:2},{value:"Check Installation",id:"check-installation",level:2},{value:"Before diving in..",id:"before-diving-in",level:2}],p={toc:c},u="wrapper";function 
d(e){let{components:t,...r}=e;return(0,a.kt)(u,(0,n.Z)({},p,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"docker"},"Docker"),(0,a.kt)("p",null,"To practice Docker, you need to install Docker.",(0,a.kt)("br",{parentName:"p"}),"\n","The Docker installation varies depending on which OS you are using.",(0,a.kt)("br",{parentName:"p"}),"\n","Please refer to the official website for the Docker installation that fits your environment: "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"ubuntu")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/mac/install/"},"mac")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.docker.com/desktop/windows/install/"},"windows"))),(0,a.kt)("h2",{id:"check-installation"},"Check Installation"),(0,a.kt)("p",null,"Check installation requires an OS, terminal environment where ",(0,a.kt)("inlineCode",{parentName:"p"},"docker run hello-world")," runs correctly."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"OS"),(0,a.kt)("th",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("th",{parentName:"tr",align:null},"Terminal"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MacOS"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"zsh")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"Powershell")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Windows"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Desktop"),(0,a.kt)("td",{parentName:"tr",align:null},"WSL2")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"Docker Engine"),(0,a.kt)("td",{parentName:"tr",align:null},"bash")))),(0,a.kt)("h2",{id:"before-diving-in"},"Before diving in.."),(0,a.kt)("p",null,"It is possible that many metaphors and examples will be focused towards MLOps as they explain the necessary Docker usage to use MLOps."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/74d04fec.2f95ae86.js b/en/assets/js/74d04fec.d7ccdd67.js similarity index 99% rename from en/assets/js/74d04fec.2f95ae86.js rename to en/assets/js/74d04fec.d7ccdd67.js index 903de50c..f7a30810 100644 --- a/en/assets/js/74d04fec.2f95ae86.js +++ b/en/assets/js/74d04fec.d7ccdd67.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7525],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),l=function(e){var t=r.useContext(i),n=t;return 
e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=l(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},b=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,o=e.originalType,i=e.parentName,p=u(e,["components","mdxType","originalType","parentName"]),c=l(n),b=s,m=c["".concat(i,".").concat(b)]||c[b]||d[b]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var o=n.length,a=new Array(o);a[0]=b;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[c]="string"==typeof e?e:s,a[1]=u;for(var l=2;l{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>u,toc:()=>l});var r=n(7462),s=(n(7294),n(3905));const o={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},a=void 0,u={unversionedId:"setup-kubernetes/kubernetes",id:"version-1.0/setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/en/docs/1.0/setup-kubernetes/intro"},next:{title:"3. 
Install Prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite"}},i={},l=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],p={toc:l},c="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment."),(0,s.kt)("p",null,"The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes."),(0,s.kt)("p",null,"Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster.\nFor detailed comparisons of each tool, please refer to the official Kubernetes ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"documentation"),"."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"MLOps for ALL")," recommends ",(0,s.kt)("strong",{parentName:"p"},"k3s")," as a tool that is easy to use when setting up a Kubernetes cluster."),(0,s.kt)("p",null,"If you want to use all the features of Kubernetes and configure the nodes, we recommend ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),".",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," has the advantage of being able to easily install other Kubernetes in an add-on format, in addition to the components we describe."),(0,s.kt)("p",null,"In this ",(0,s.kt)("em",{parentName:"p"},"MLOps for ALL"),", in order to use the components that will be built for MLOps smoothly, there are additional settings that must be configured when building the Kubernetes cluster using each of the tools."),(0,s.kt)("p",null,"The scope of this ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes")," section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster."),(0,s.kt)("p",null,"The detailed setup procedure is composed of the following flow, as each of the three tools has its own setup procedure."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"Let's now build a Kubernetes cluster by using each of the tools. 
You don't have to use all the tools, and you can use the tools that you are familiar with."))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7525],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>m});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),l=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=l(e.components);return r.createElement(i.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},b=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,o=e.originalType,i=e.parentName,p=u(e,["components","mdxType","originalType","parentName"]),c=l(n),b=s,m=c["".concat(i,".").concat(b)]||c[b]||d[b]||o;return n?r.createElement(m,a(a({ref:t},p),{},{components:n})):r.createElement(m,a({ref:t},p))}));function m(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var o=n.length,a=new Array(o);a[0]=b;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[c]="string"==typeof e?e:s,a[1]=u;for(var l=2;l{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>a,default:()=>d,frontMatter:()=>o,metadata:()=>u,toc:()=>l});var r=n(7462),s=(n(7294),n(3905));const o={title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},a=void 0,u={unversionedId:"setup-kubernetes/kubernetes",id:"version-1.0/setup-kubernetes/kubernetes",title:"2. Setup Kubernetes",description:"Setup Kubernetes",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/kubernetes.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/kubernetes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Setup Kubernetes",description:"Setup Kubernetes",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Introduction",permalink:"/en/docs/1.0/setup-kubernetes/intro"},next:{title:"3. 
Install Prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite"}},i={},l=[{value:"Setup Kubernetes Cluster",id:"setup-kubernetes-cluster",level:2}],p={toc:l},c="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(c,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"setup-kubernetes-cluster"},"Setup Kubernetes Cluster"),(0,s.kt)("p",null,"For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment."),(0,s.kt)("p",null,"The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes."),(0,s.kt)("p",null,"Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster.\nFor detailed comparisons of each tool, please refer to the official Kubernetes ",(0,s.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/tasks/tools/"},"documentation"),"."),(0,s.kt)("p",null,(0,s.kt)("em",{parentName:"p"},"MLOps for ALL")," recommends ",(0,s.kt)("strong",{parentName:"p"},"k3s")," as a tool that is easy to use when setting up a Kubernetes cluster."),(0,s.kt)("p",null,"If you want to use all the features of Kubernetes and configure the nodes, we recommend ",(0,s.kt)("strong",{parentName:"p"},"kubeadm"),".",(0,s.kt)("br",{parentName:"p"}),"\n",(0,s.kt)("strong",{parentName:"p"},"minikube")," has the advantage of being able to easily install other Kubernetes in an add-on format, in addition to the components we describe."),(0,s.kt)("p",null,"In this ",(0,s.kt)("em",{parentName:"p"},"MLOps for ALL"),", in order to use the components that will be built for MLOps smoothly, there are additional settings that must be configured when building the Kubernetes cluster using each of the tools."),(0,s.kt)("p",null,"The scope of this ",(0,s.kt)("strong",{parentName:"p"},"Setup Kubernetes")," section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster."),(0,s.kt)("p",null,"The detailed setup procedure is composed of the following flow, as each of the three tools has its own setup procedure."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"3. Setup Prerequisite\n4. Setup Kubernetes\n 4.1. with k3s\n 4.2. with minikube\n 4.3. with kubeadm\n5. Setup Kubernetes Modules\n")),(0,s.kt)("p",null,"Let's now build a Kubernetes cluster by using each of the tools. 
You don't have to use all the tools, and you can use the tools that you are familiar with."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/77df73f1.b5753fa9.js b/en/assets/js/77df73f1.66a549ae.js similarity index 98% rename from en/assets/js/77df73f1.b5753fa9.js rename to en/assets/js/77df73f1.66a549ae.js index 13ecee0e..fd9758d0 100644 --- a/en/assets/js/77df73f1.b5753fa9.js +++ b/en/assets/js/77df73f1.66a549ae.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1011],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"version-1.0/kubeflow-dashboard-guide/experiments",title:"5. Experiments(AutoML)",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes"},next:{title:"6. 
Kubeflow Pipeline Relates",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(7079).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"The Experiments(AutoML) page is where you can manage ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),", which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow."),(0,o.kt)("p",null,"The usage of Katib and Experiments(AutoML) is not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for Everyone")," v1.0, and will be added in v2.0."))}d.isMDXComponent=!0},7079:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},7173:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1011],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"version-1.0/kubeflow-dashboard-guide/experiments",title:"5. 
Experiments(AutoML)",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes"},next:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(7079).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"The Experiments(AutoML) page is where you can manage ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),", which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow."),(0,o.kt)("p",null,"The usage of Katib and Experiments(AutoML) is not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for Everyone")," v1.0, and will be added in v2.0."))}d.isMDXComponent=!0},7079:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},7173:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/806f62a0.5e34d646.js b/en/assets/js/806f62a0.2edc5228.js similarity index 98% rename from en/assets/js/806f62a0.5e34d646.js rename to en/assets/js/806f62a0.2edc5228.js index 5c5a5df4..c20870a6 100644 --- a/en/assets/js/806f62a0.5e34d646.js +++ b/en/assets/js/806f62a0.2edc5228.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1512],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>f});var a=n(7294);function o(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function i(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function r(t){for(var e=1;e=0||(o[n]=t[n]);return o}(t,e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(o[n]=t[n])}return o}var p=a.createContext({}),d=function(t){var e=a.useContext(p),n=e;return t&&(n="function"==typeof t?t(e):r(r({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(p.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var 
n=t.components,o=t.mdxType,i=t.originalType,p=t.parentName,s=l(t,["components","mdxType","originalType","parentName"]),u=d(n),m=o,f=u["".concat(p,".").concat(m)]||u[m]||c[m]||i;return n?a.createElement(f,r(r({ref:e},s),{},{components:n})):a.createElement(f,r({ref:e},s))}));function f(t,e){var n=arguments,o=e&&e.mdxType;if("string"==typeof t||o){var i=n.length,r=new Array(i);r[0]=m;var l={};for(var p in e)hasOwnProperty.call(e,p)&&(l[p]=e[p]);l.originalType=t,l[u]="string"==typeof t?t:o,r[1]=l;for(var d=2;d{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/how-to-debug",id:"version-1.0/kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/en/docs/1.0/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/how-to-debug.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow"},next:{title:"1. What is API Deployment?",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment"}},p={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...i}=t;return(0,o.kt)(u,(0,a.Z)({},s,i,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,o.kt)("p",null,"This page covers how to debug Kubeflow components."),(0,o.kt)("h2",{id:"failed-component"},"Failed Component"),(0,o.kt)("p",null,"We will modify a pipeline used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," in this page."),(0,o.kt)("p",null,"First, let's modify the pipeline so that the component fails."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", 
"scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,o.kt)("p",null,"The modifications are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"load_iris_data")," component for loading data, ",(0,o.kt)("inlineCode",{parentName:"li"},"None")," was injected into the ",(0,o.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," feature."),(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," component, use the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na()")," function to remove rows with na values.")),(0,o.kt)("p",null,"Now let's upload and run the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","After running, if you press Run you will see that it has failed in the ",(0,o.kt)("inlineCode",{parentName:"p"},"Train from csv")," component."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-0.png",src:n(4904).Z,width:"2826",height:"1790"})),(0,o.kt)("p",null,"Click on the failed component and check the log to see the reason for the failure."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-2.png",src:n(2928).Z,width:"2826",height:"1796"})),(0,o.kt)("p",null,"If the log shows that the data count is 0 and the component did not run, there may be an issue with the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","Let's investigate what might be the problem."),(0,o.kt)("p",null,"First, click on the component and go to the Input/Output tab to download the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","You can click on the link indicated by the red square to download the data."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-5.png",src:n(9644).Z,width:"2690",height:"1740"})),(0,o.kt)("p",null,"Download both files to the same location. 
Then navigate to the specified path and check the downloaded files."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,o.kt)("p",null,"There are two files as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,o.kt)("p",null,"I will try to unzip it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,o.kt)("p",null,"And then run the component code using a Jupyter notebook.\n",(0,o.kt)("img",{alt:"debug-3.png",src:n(1219).Z,width:"2434",height:"1690"})),(0,o.kt)("p",null,"Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed.\nNow that we know the cause of the problem, we can modify the component to drop based on columns."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,o.kt)("p",null,"After modifying, upload the pipeline again and run it to confirm that it is running normally as follows."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-6.png",src:n(5292).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},4904:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},2928:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},1219:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},9644:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},5292:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1512],{3905:(t,e,n)=>{n.d(e,{Zo:()=>s,kt:()=>f});var a=n(7294);function o(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function i(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,a)}return n}function r(t){for(var e=1;e=0||(o[n]=t[n]);return o}(t,e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(o[n]=t[n])}return o}var p=a.createContext({}),d=function(t){var e=a.useContext(p),n=e;return t&&(n="function"==typeof t?t(e):r(r({},e),t)),n},s=function(t){var e=d(t.components);return a.createElement(p.Provider,{value:e},t.children)},u="mdxType",c={inlineCode:"code",wrapper:function(t){var e=t.children;return a.createElement(a.Fragment,{},e)}},m=a.forwardRef((function(t,e){var n=t.components,o=t.mdxType,i=t.originalType,p=t.parentName,s=l(t,["components","mdxType","originalType","parentName"]),u=d(n),m=o,f=u["".concat(p,".").concat(m)]||u[m]||c[m]||i;return n?a.createElement(f,r(r({ref:e},s),{},{components:n})):a.createElement(f,r({ref:e},s))}));function f(t,e){var 
n=arguments,o=e&&e.mdxType;if("string"==typeof t||o){var i=n.length,r=new Array(i);r[0]=m;var l={};for(var p in e)hasOwnProperty.call(e,p)&&(l[p]=e[p]);l.originalType=t,l[u]="string"==typeof t?t:o,r[1]=l;for(var d=2;d{n.r(e),n.d(e,{assets:()=>p,contentTitle:()=>r,default:()=>c,frontMatter:()=>i,metadata:()=>l,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/how-to-debug",id:"version-1.0/kubeflow/how-to-debug",title:"13. Component - Debugging",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/how-to-debug.md",sourceDirName:"kubeflow",slug:"/kubeflow/how-to-debug",permalink:"/en/docs/1.0/kubeflow/how-to-debug",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/how-to-debug.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:13,frontMatter:{title:"13. Component - Debugging",description:"",sidebar_position:13,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"12. Component - MLFlow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow"},next:{title:"1. What is API Deployment?",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment"}},p={},d=[{value:"Debugging Pipeline",id:"debugging-pipeline",level:2},{value:"Failed Component",id:"failed-component",level:2}],s={toc:d},u="wrapper";function c(t){let{components:e,...i}=t;return(0,o.kt)(u,(0,a.Z)({},s,i,{components:e,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"debugging-pipeline"},"Debugging Pipeline"),(0,o.kt)("p",null,"This page covers how to debug Kubeflow components."),(0,o.kt)("h2",{id:"failed-component"},"Failed Component"),(0,o.kt)("p",null,"We will modify a pipeline used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-mlflow#mlflow-pipeline"},"Component - MLFlow")," in this page."),(0,o.kt)("p",null,"First, let's modify the pipeline so that the component fails."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n \n data["sepal length (cm)"] = None\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna()\n data.to_csv(output_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: 
str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n\n@pipeline(name="debugging_pipeline")\ndef debugging_pipeline(kernel: str):\n iris_data = load_iris_data()\n drop_data = drop_na_from_csv(data=iris_data.outputs["data"])\n model = train_from_csv(\n train_data=drop_data.outputs["output"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(debugging_pipeline, "debugging_pipeline.yaml")\n\n')),(0,o.kt)("p",null,"The modifications are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"load_iris_data")," component for loading data, ",(0,o.kt)("inlineCode",{parentName:"li"},"None")," was injected into the ",(0,o.kt)("inlineCode",{parentName:"li"},"sepal length (cm)")," feature."),(0,o.kt)("li",{parentName:"ol"},"In the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na_from_csv")," component, use the ",(0,o.kt)("inlineCode",{parentName:"li"},"drop_na()")," function to remove rows with na values.")),(0,o.kt)("p",null,"Now let's upload and run the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","After running, if you press Run you will see that it has failed in the ",(0,o.kt)("inlineCode",{parentName:"p"},"Train from csv")," component."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-0.png",src:n(4904).Z,width:"2826",height:"1790"})),(0,o.kt)("p",null,"Click on the failed component and check the log to see the reason for the failure."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-2.png",src:n(2928).Z,width:"2826",height:"1796"})),(0,o.kt)("p",null,"If the log shows that the data count is 0 and the component did not run, there may be an issue with the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","Let's investigate what might be the problem."),(0,o.kt)("p",null,"First, click on the component and go to the Input/Output tab to download the input data.",(0,o.kt)("br",{parentName:"p"}),"\n","You can click on the link indicated by the red square to download the data."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-5.png",src:n(9644).Z,width:"2690",height:"1740"})),(0,o.kt)("p",null,"Download both files to the same location. 
Then navigate to the specified path and check the downloaded files."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ls\n")),(0,o.kt)("p",null,"There are two files as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"drop-na-from-csv-output.tgz load-iris-data-target.tgz\n")),(0,o.kt)("p",null,"I will try to unzip it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"tar -xzvf load-iris-data-target.tgz ; mv data target.csv\ntar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv\n")),(0,o.kt)("p",null,"And then run the component code using a Jupyter notebook.\n",(0,o.kt)("img",{alt:"debug-3.png",src:n(1219).Z,width:"2434",height:"1690"})),(0,o.kt)("p",null,"Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed.\nNow that we know the cause of the problem, we can modify the component to drop based on columns."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'@partial(\n create_component_from_func,\n packages_to_install=["pandas"],\n)\ndef drop_na_from_csv(\n data_path: InputPath("csv"),\n output_path: OutputPath("csv"),\n):\n import pandas as pd\n\n data = pd.read_csv(data_path)\n data = data.dropna(axis="columns")\n data.to_csv(output_path, index=False)\n')),(0,o.kt)("p",null,"After modifying, upload the pipeline again and run it to confirm that it is running normally as follows."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"debug-6.png",src:n(5292).Z,width:"2694",height:"1748"})))}c.isMDXComponent=!0},4904:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-0-9ab1af1c9020a9dfc907d8d36dadac71.png"},2928:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-2-50081530b33b57206f6ef497212cf2a9.png"},1219:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-3-4fda7b9b4f2c366147cd6aeb124cc9c5.png"},9644:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-5-5b7edcc1e29c85f71b279af3f54f3f69.png"},5292:(t,e,n)=>{n.d(e,{Z:()=>a});const a=n.p+"assets/images/debug-6-e2da46f9318827a339b04097e68f635a.png"}}]); \ No newline at end of file diff --git a/en/assets/js/8111fb61.d690c202.js b/en/assets/js/8111fb61.c6ed6c12.js similarity index 99% rename from en/assets/js/8111fb61.d690c202.js rename to en/assets/js/8111fb61.c6ed6c12.js index b9e5bb6d..ee8c94b3 100644 --- a/en/assets/js/8111fb61.d690c202.js +++ b/en/assets/js/8111fb61.c6ed6c12.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9201],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return 
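To make the Jupyter notebook step of the walkthrough above concrete, here is a minimal sketch that reproduces the failure locally and checks the fix. It rebuilds the faulty input from scikit-learn's iris dataset (the same `None` injection as the `load_iris_data` component) rather than reading the downloaded artifacts, so it only illustrates the row-wise versus column-wise `dropna` behaviour.

```python
# Minimal sketch of the notebook step: rebuild the failing input locally
# (iris data with the "sepal length (cm)" column set to None) and compare
# the two dropna strategies.
import pandas as pd
from sklearn.datasets import load_iris

iris = load_iris()
data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
data["sepal length (cm)"] = None  # same fault injection as the load_iris_data component

# Row-wise drop: every row contains at least one NaN, so nothing survives.
# This is the behaviour that made the "Train from csv" component fail.
rows_dropped = data.dropna()
print("dropna() shape:", rows_dropped.shape)                 # (0, 4)

# Column-wise drop: only the broken column is removed, all 150 rows remain.
cols_dropped = data.dropna(axis="columns")
print('dropna(axis="columns") shape:', cols_dropped.shape)   # (150, 3)
```

Because every row contains a NaN in the injected column, `dropna()` removes all 150 rows, which is exactly why the `Train from csv` component received an empty DataFrame; `dropna(axis="columns")` removes only the broken feature and keeps the rows.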
n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=c(a),m=o,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||r;return a?n.createElement(k,i(i({ref:t},u),{},{components:a})):n.createElement(k,i({ref:t},u))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var n=a(7462),o=(a(7294),a(3905));const r={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"setup-components/install-components-kf",id:"version-1.0/setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/en/docs/1.0/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-kf.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. 
MLflow Tracking Server",permalink:"/en/docs/1.0/setup-components/install-components-mlflow"}},s={},c=[{value:"Prepare the installation file",id:"prepare-the-installation-file",level:2},{value:"Install each components",id:"install-each-components",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"Check installation",id:"check-installation",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...r}=e;return(0,o.kt)(p,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prepare-the-installation-file"},"Prepare the installation file"),(0,o.kt)("p",null,"Prepare the installation files for installing Kubeflow ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")),(0,o.kt)("p",null,"Clone the ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," with the ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")," tag, and move to the corresponding folder."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,o.kt)("h2",{id:"install-each-components"},"Install each components"),(0,o.kt)("p",null,"The kubeflow/manifests repository provides installation commands for each component, but it often lacks information on potential issues that may arise during installation or how to verify if the installation was successful. This can make it challenging for first-time users.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, in this document, we will provide instructions on how to verify the successful installation of each component."),(0,o.kt)("p",null,"Please note that this document will not cover the installation of components that are not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", such as Knative, KFServing, and MPI Operator, as we prioritize efficient resource usage."),(0,o.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install cert-manager."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector 
created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 3 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"To install ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer"),", run the following command:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer 
created\n")),(0,o.kt)("p",{parentName:"li"},"Note: If the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager-webhook")," deployment is not in the Running state, you may encounter an error similar to the one below, and the ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer")," may not be installed. In this case, please ensure that all 3 pods of cert-manager are Running before retrying the command.",(0,o.kt)("br",{parentName:"p"}),"\n","If you encounter the below error, make sure that the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager")," deployment and all its pods are running properly before proceeding."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,o.kt)("h3",{id:"istio"},"Istio"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install Custom Resource Definition(CRD) for istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio namespace"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply 
-f -\n")),(0,o.kt)("p",{parentName:"li"},"If run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for both pods in the istio-system namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,o.kt)("h3",{id:"dex"},"Dex"),(0,o.kt)("p",null,"Now, let's install Dex."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,o.kt)("p",null,"Wait until the single pod in the auth namespace is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,o.kt)("p",null,"When everyone is running, similar results will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,o.kt)("p",null,"Install OIDC AuthService."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,o.kt)("p",null,"Wait until the authservice-0 pod in the istio-system namespace is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,o.kt)("p",null,"If everybody runs, a similar result will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,o.kt)("p",null,"Create a Kubeflow Namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,o.kt)("p",null,"If generated normally, similar results will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,o.kt)("p",null,"Install kubeflow-roles."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If properly performed, it will output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,o.kt)("p",null,"Retrieve the kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,o.kt)("p",null,"The following 6 clusterroles will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 
2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,o.kt)("p",null,"Install Kubeflow Istio Resources."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,o.kt)("p",null,"The following three clusterroles are output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,o.kt)("p",null,"Check if the gateway is properly installed in the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,o.kt)("p",null,"If generated normally, a result similar to the following will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,o.kt)("p",null,"Installing Kubeflow Pipelines."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,o.kt)("p",null,"This command is installing multiple resources at once, but there are resources with dependencies on the installation order. 
Therefore, depending on the time, a similar error may occur."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,o.kt)("p",null,"If a similar error occurs, wait about 10 seconds and then try the command above again."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"Check to see if it has been installed correctly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,o.kt)("p",null,"Wait until all 16 pods are running as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,o.kt)("p",null,"Additionally, please check if the ml-pipeline UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/"),". Confirm that the following screen is displayed."),(0,o.kt)("p",null,'If you get the error "Connection refused on localhost", you can access it through the command line by setting the address, as long as there are no security issues. 
To check whether the ml-pipeline UI is reachable, bind the port-forward to all interfaces with 0.0.0.0.'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"If the connection is still refused even with this option, open port 8888 (or all TCP ports) for access in your firewall settings."),(0,o.kt)("p",null,"When you open a web browser and access the path ",(0,o.kt)("inlineCode",{parentName:"p"},"http://:8888/#/pipelines/"),", you should see the ml-pipeline UI screen."),(0,o.kt)("p",null,"For the other ports used later in this document, run the port-forward command in the same way and open the corresponding port in the firewall."),(0,o.kt)("p",null,"Next, we will install Katib."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,o.kt)("p",null,"Check that it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,o.kt)("p",null,"Wait until all four pods are Running, as shown below."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,o.kt)("p",null,"Additionally, check that the Katib UI is reachable."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 
8081:80\n")),(0,o.kt)("p",null,"Open the web browser and access the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," to confirm the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,o.kt)("p",null,"Check to see if it has been installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,o.kt)("p",null,"Wait until one pod related to centraldashboard in the kubeflow namespace becomes Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,o.kt)("p",null,"Additionally, we will check if the Central Dashboard UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,o.kt)("p",null,"Open the web browser to connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," and check that the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,o.kt)("p",null,"Wait until one pod is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,o.kt)("p",null,"Install the Notebook Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/notebook-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/notebooks.kubeflow.org created\nserviceaccount/notebook-controller-service-account created\nrole.rbac.authorization.k8s.io/notebook-controller-leader-election-role created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view created\nclusterrole.rbac.authorization.k8s.io/notebook-controller-role created\nrolebinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/notebook-controller-role-binding created\nconfigmap/notebook-controller-config-m... created\ndeployment.apps/notebook-controller created\n")),(0,o.kt)("p",null,"Check if the installation was successful. Wait until one pod is Running:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep notebook-controller\n")),(0,o.kt)("p",null,"Install the Jupyter Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/jupyter/jupyter-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created\n")),(0,o.kt)("p",null,"Wait until one pod related to jupyter-web-app in the kubeflow namespace is Running."),(0,o.kt)("p",null,"Next, we will install the Profile Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,o.kt)("p",null,"Check that it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,o.kt)("p",null,"Wait until one pod is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,o.kt)("p",null,"Install the Volumes Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,o.kt)("p",null,"Check that it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,o.kt)("p",null,"Wait until one pod is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,o.kt)("p",null,"Install the Tensorboard Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboards-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/tensorboards-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created\nclusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created\nconfigmap/tensorboards-web-app-parameters-g28fbd6cch created\nservice/tensorboards-web-app-service created\ndeployment.apps/tensorboards-web-app-deployment created\nvirtualservice.networking.istio.io/t... created\n")),(0,o.kt)("p",null,"Check that it has been installed properly, and wait until the pod of the tensorboards-web-app deployment (for example ",(0,o.kt)("inlineCode",{parentName:"p"},"tensorboard-web-app-deployment-6ff79b7f44-qbzmw"),") is Running."),(0,o.kt)("p",null,"Next, install the Tensorboard Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/tensorboard/tensorboard-controller/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, a custom resource definition for ",(0,o.kt)("inlineCode",{parentName:"p"},"tensorboards.tensorboard.kubeflow.org")," is created, along with a service account, roles, role bindings, a config map, the controller-manager metrics service, and the ",(0,o.kt)("inlineCode",{parentName:"p"},"deployment.apps/tensorboard-controller-controller-manager")," deployment."),(0,o.kt)("p",null,"Check that the tensorboard-controller-controller-manager deployment was installed correctly and wait until one pod is Running."),(0,o.kt)("p",null,"Next, install the Training Operator."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,o.kt)("p",null,"Check that it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,o.kt)("p",null,"Wait until one pod is Running, as shown below."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,o.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,o.kt)("p",null,"To use Kubeflow, create a Kubeflow Profile for the user."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,o.kt)("p",null,"Confirm that the kubeflow-user-example-com profile has been created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,o.kt)("h2",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"To confirm that the installation was successful, port-forward the istio-ingressgateway service and access the Kubeflow central dashboard from a web browser."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,"Open a web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to confirm that the following screen is displayed.\n",(0,o.kt)("img",{alt:"login-ui",src:a(5140).Z,width:"2554",height:"1202"})),(0,o.kt)("p",null,"Enter the following credentials to log in."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email Address: ",(0,o.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,o.kt)("li",{parentName:"ul"},"Password: 
",(0,o.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"central-dashboard",src:a(8150).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},8150:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},5140:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9201],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=c(a),m=o,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||r;return a?n.createElement(k,i(i({ref:t},u),{},{components:a})):n.createElement(k,i({ref:t},u))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var n=a(7462),o=(a(7294),a(3905));const r={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"setup-components/install-components-kf",id:"version-1.0/setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/en/docs/1.0/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-kf.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. 
MLflow Tracking Server",permalink:"/en/docs/1.0/setup-components/install-components-mlflow"}},s={},c=[{value:"Prepare the installation file",id:"prepare-the-installation-file",level:2},{value:"Install each components",id:"install-each-components",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"Check installation",id:"check-installation",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...r}=e;return(0,o.kt)(p,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prepare-the-installation-file"},"Prepare the installation file"),(0,o.kt)("p",null,"Prepare the installation files for installing Kubeflow ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")),(0,o.kt)("p",null,"Clone the ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," with the ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")," tag, and move to the corresponding folder."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,o.kt)("h2",{id:"install-each-components"},"Install each components"),(0,o.kt)("p",null,"The kubeflow/manifests repository provides installation commands for each component, but it often lacks information on potential issues that may arise during installation or how to verify if the installation was successful. This can make it challenging for first-time users.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, in this document, we will provide instructions on how to verify the successful installation of each component."),(0,o.kt)("p",null,"Please note that this document will not cover the installation of components that are not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", such as Knative, KFServing, and MPI Operator, as we prioritize efficient resource usage."),(0,o.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install cert-manager."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector 
created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 3 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"To install ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer"),", run the following command:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer 
created\n")),(0,o.kt)("p",{parentName:"li"},"Note: If the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager-webhook")," deployment is not in the Running state, you may encounter an error similar to the one below, and the ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer")," may not be installed. In this case, please ensure that all 3 pods of cert-manager are Running before retrying the command.",(0,o.kt)("br",{parentName:"p"}),"\n","If you encounter the below error, make sure that the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager")," deployment and all its pods are running properly before proceeding."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,o.kt)("h3",{id:"istio"},"Istio"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install Custom Resource Definition(CRD) for istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio namespace"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply 
-f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 2 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,o.kt)("h3",{id:"dex"},"Dex"),(0,o.kt)("p",null,"Now, let's install dex."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,o.kt)("p",null,"Wait until all one pod in the auth namespace is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,o.kt)("p",null,"When everyone is running, similar results will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,o.kt)("p",null,"Install OIDC AuthService."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,o.kt)("p",null,"Wait until the authservice-0 pod in the istio-system namespace is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,o.kt)("p",null,"If everybody runs, a similar result will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,o.kt)("p",null,"Create a Kubeflow Namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,o.kt)("p",null,"If generated normally, similar results will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,o.kt)("p",null,"Install kubeflow-roles."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If properly performed, it will output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,o.kt)("p",null,"Retrieve the kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,o.kt)("p",null,"The following 6 clusterroles will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 
2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,o.kt)("p",null,"Install Kubeflow Istio Resources."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,o.kt)("p",null,"The following three clusterroles are output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,o.kt)("p",null,"Check if the gateway is properly installed in the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,o.kt)("p",null,"If generated normally, a result similar to the following will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,o.kt)("p",null,"Installing Kubeflow Pipelines."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,o.kt)("p",null,"This command is installing multiple resources at once, but there are resources with dependencies on the installation order. 
Therefore, depending on the time, a similar error may occur."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,o.kt)("p",null,"If a similar error occurs, wait about 10 seconds and then try the command above again."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"Check to see if it has been installed correctly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,o.kt)("p",null,"Wait until all 16 pods are running as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,o.kt)("p",null,"Additionally, please check if the ml-pipeline UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/"),". Confirm that the following screen is displayed."),(0,o.kt)("p",null,'If you get the error "Connection refused on localhost", you can access it through the command line by setting the address, as long as there are no security issues. 
In that case, bind the port-forward to all addresses (0.0.0.0) instead of only to localhost:'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"If the connection is still refused even with this option, open port 8888 (or the TCP port range you use for these UIs) in the firewall settings of the server."),(0,o.kt)("p",null,"You can then open a web browser on another machine and access ",(0,o.kt)("inlineCode",{parentName:"p"},"http://<server-ip>:8888/#/pipelines/")," to see the ml-pipeline UI."),(0,o.kt)("p",null,"The same approach, binding to 0.0.0.0 and opening the corresponding port in the firewall, applies to the other UIs that are port-forwarded below."),(0,o.kt)("p",null,"Next, we will install Katib."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,o.kt)("p",null,"Confirm that it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,o.kt)("p",null,"Wait until the following four pods are Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,o.kt)("p",null,"Additionally, we will confirm that the Katib UI is reachable."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 8081:80\n")),
8081:80\n")),(0,o.kt)("p",null,"Open the web browser and access the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," to confirm the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,o.kt)("p",null,"Check to see if it has been installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,o.kt)("p",null,"Wait until one pod related to centraldashboard in the kubeflow namespace becomes Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,o.kt)("p",null,"Additionally, we will check if the Central Dashboard UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,o.kt)("p",null,"Open the web browser to connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," and check that the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,o.kt)("p",null,"Wait until one pod is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,o.kt)("p",null,"Install the Notebook controller."),(0,o.kt)("p",null,"If done successfully, it will output as follows.\ndeployment.apps/notebook-controller created"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"\nA CustomResourceDefinition.apiextensions.k8s.io/notebooks.kubeflow.org, ServiceAccount/notebook-controller-service-account, Role.rbac.authorization.k8s.io/notebook-controller-leader-election-role, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view, ClusterRole.rbac.authorization.k8s.io/notebook-controller-role, RoleBinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding, ClusterRoleBinding.rbac.authorization.k8s.io/notebook-controller-role-binding, ConfigMap/notebook-controller-config-m\n\nTranslation: Check if the installation was successful. Wait until one pod is running with the following command: kubectl get po -n kubeflow | grep notebook-controller.\nTranslation: Install Jupyter Web App.\nIf performed correctly, the following will be output.\n")),(0,o.kt)("p",null," Confirm that the installation was successful:\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created"),(0,o.kt)("p",null,"Wait until one pod is Running."),(0,o.kt)("p",null,"English: We will install the Profile Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,o.kt)("p",null,"Check to see if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,o.kt)("p",null,"Install the Volumes Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as 
follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Install Tensorboard Web App.\n\nService account/tensorboards-web-app-service-account created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created, Cluster role binding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created, Config map/tensorboards-web-app-parameters-g28fbd6cch created, Service/tensorboards-web-app-service created, Deployment.apps/tensorboards-web-app-deployment created, and Virtual service.networking.istio.io/t\nCheck if it is installed correctly.\n```bash\nDeployment "tensorboard-web-app-deployment-6ff79b7f44-qbzmw" created\ndeployment.apps/tensorboard-controller-controller-manager created\n')),(0,o.kt)("p",null,"A custom resource definition for 'tensorboards.tensorboard.kubeflow.org' was created, along with a service account, roles, role bindings, a config map, and a deployment for the controller manager metrics service.\nCheck if the deployment.apps/tensorboard-controller-controller-manager was installed correctly. 
(0,o.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,o.kt)("p",null,"To use Kubeflow, create a Kubeflow Profile for the user."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,o.kt)("p",null,"Confirm that the kubeflow-user-example-com profile has been created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,o.kt)("h2",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Confirm that the installation succeeded by port-forwarding the Istio ingress gateway and opening the Kubeflow central dashboard in a web browser."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,"Open a web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to confirm that the following screen is displayed.\n",(0,o.kt)("img",{alt:"login-ui",src:a(5140).Z,width:"2554",height:"1202"})),(0,o.kt)("p",null,"Enter the following credentials to log in."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email Address: ",(0,o.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,o.kt)("li",{parentName:"ul"},"Password: 
",(0,o.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"central-dashboard",src:a(8150).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},8150:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},5140:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"}}]); \ No newline at end of file diff --git a/en/assets/js/81a7ed24.b0469b80.js b/en/assets/js/81a7ed24.be3cd94e.js similarity index 99% rename from en/assets/js/81a7ed24.b0469b80.js rename to en/assets/js/81a7ed24.be3cd94e.js index 566beaa1..c0748203 100644 --- a/en/assets/js/81a7ed24.b0469b80.js +++ b/en/assets/js/81a7ed24.be3cd94e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5520],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=c(a),m=o,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||r;return a?n.createElement(k,i(i({ref:t},u),{},{components:a})):n.createElement(k,i({ref:t},u))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var n=a(7462),o=(a(7294),a(3905));const r={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"setup-components/install-components-kf",id:"setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/en/docs/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-kf.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. 
Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. MLflow Tracking Server",permalink:"/en/docs/setup-components/install-components-mlflow"}},s={},c=[{value:"Prepare the installation file",id:"prepare-the-installation-file",level:2},{value:"Install each components",id:"install-each-components",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"Check installation",id:"check-installation",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...r}=e;return(0,o.kt)(p,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prepare-the-installation-file"},"Prepare the installation file"),(0,o.kt)("p",null,"Prepare the installation files for installing Kubeflow ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")),(0,o.kt)("p",null,"Clone the ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," with the ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")," tag, and move to the corresponding folder."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,o.kt)("h2",{id:"install-each-components"},"Install each components"),(0,o.kt)("p",null,"The kubeflow/manifests repository provides installation commands for each component, but it often lacks information on potential issues that may arise during installation or how to verify if the installation was successful. 
This can make it challenging for first-time users.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, in this document, we will provide instructions on how to verify the successful installation of each component."),(0,o.kt)("p",null,"Please note that this document will not cover the installation of components that are not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", such as Knative, KFServing, and MPI Operator, as we prioritize efficient resource usage."),(0,o.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install cert-manager."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers 
created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 3 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"To install ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer"),", run the following command:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer created\n")),(0,o.kt)("p",{parentName:"li"},"Note: If the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager-webhook")," deployment is not in the Running state, you may encounter an error similar to the one below, and the ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer")," may not be installed. 
In this case, please ensure that all 3 pods of cert-manager are Running before retrying the command.",(0,o.kt)("br",{parentName:"p"}),"\n","If you encounter the below error, make sure that the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager")," deployment and all its pods are running properly before proceeding."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,o.kt)("h3",{id:"istio"},"Istio"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install Custom Resource Definition(CRD) for istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio namespace"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account 
created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 2 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,o.kt)("h3",{id:"dex"},"Dex"),(0,o.kt)("p",null,"Now, let's install dex."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,o.kt)("p",null,"Wait until all one pod in the auth namespace is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,o.kt)("p",null,"When everyone is running, similar results will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 
12s\n")),(0,o.kt)("p",null,"Install OIDC AuthService."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,o.kt)("p",null,"Wait until the authservice-0 pod in the istio-system namespace is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,o.kt)("p",null,"If everybody runs, a similar result will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,o.kt)("p",null,"Create a Kubeflow Namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,o.kt)("p",null,"If generated normally, similar results will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,o.kt)("p",null,"Install kubeflow-roles."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If properly performed, it will output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,o.kt)("p",null,"Retrieve the kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,o.kt)("p",null,"The following 6 clusterroles will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,o.kt)("p",null,"Install Kubeflow Istio Resources."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will 
be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,o.kt)("p",null,"The following three clusterroles are output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,o.kt)("p",null,"Check if the gateway is properly installed in the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,o.kt)("p",null,"If generated normally, a result similar to the following will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,o.kt)("p",null,"Installing Kubeflow Pipelines."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,o.kt)("p",null,"This command is installing multiple resources at once, but there are resources with dependencies on the installation order. 
Therefore, depending on the time, a similar error may occur."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,o.kt)("p",null,"If a similar error occurs, wait about 10 seconds and then try the command above again."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"Check to see if it has been installed correctly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,o.kt)("p",null,"Wait until all 16 pods are running as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,o.kt)("p",null,"Additionally, please check if the ml-pipeline UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/"),". Confirm that the following screen is displayed."),(0,o.kt)("p",null,'If you get the error "Connection refused on localhost", you can access it through the command line by setting the address, as long as there are no security issues. 
To check if the ml-pipeline UI connects normally, open the bind of all addresses with 0.0.0.0.'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Despite running with the above options, if connection refusal issues still occur, add access permission by allowing all TCP protocol ports in the firewall settings or by adding access permission to port 8888."),(0,o.kt)("p",null,"When you open the web browser and access the path ",(0,o.kt)("inlineCode",{parentName:"p"},"http://:8888/#/pipelines/"),", you can see the ml-pipeline UI screen."),(0,o.kt)("p",null,"When accessing the other ports path that is being processed in the bottom, run the command in the same way as above and add the port number to the firewall to run it."),(0,o.kt)("p",null,"English: We will install Katib."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,o.kt)("p",null,"Confirm if it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,o.kt)("p",null,"Wait until four pods are Running, like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,o.kt)("p",null,"Additionally, we will confirm that the Katib UI is connected normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 
8081:80\n")),(0,o.kt)("p",null,"Open the web browser and access the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," to confirm the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,o.kt)("p",null,"Check to see if it has been installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,o.kt)("p",null,"Wait until one pod related to centraldashboard in the kubeflow namespace becomes Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,o.kt)("p",null,"Additionally, we will check if the Central Dashboard UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,o.kt)("p",null,"Open the web browser to connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," and check that the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,o.kt)("p",null,"Wait until one pod is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,o.kt)("p",null,"Install the Notebook controller."),(0,o.kt)("p",null,"If done successfully, it will output as follows.\ndeployment.apps/notebook-controller created"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"\nA CustomResourceDefinition.apiextensions.k8s.io/notebooks.kubeflow.org, ServiceAccount/notebook-controller-service-account, Role.rbac.authorization.k8s.io/notebook-controller-leader-election-role, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view, ClusterRole.rbac.authorization.k8s.io/notebook-controller-role, RoleBinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding, ClusterRoleBinding.rbac.authorization.k8s.io/notebook-controller-role-binding, ConfigMap/notebook-controller-config-m\n\nTranslation: Check if the installation was successful. Wait until one pod is running with the following command: kubectl get po -n kubeflow | grep notebook-controller.\nTranslation: Install Jupyter Web App.\nIf performed correctly, the following will be output.\n")),(0,o.kt)("p",null," Confirm that the installation was successful:\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created"),(0,o.kt)("p",null,"Wait until one pod is Running."),(0,o.kt)("p",null,"English: We will install the Profile Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,o.kt)("p",null,"Check to see if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,o.kt)("p",null,"Install the Volumes Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as 
follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Install Tensorboard Web App.\n\nService account/tensorboards-web-app-service-account created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created, Cluster role binding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created, Config map/tensorboards-web-app-parameters-g28fbd6cch created, Service/tensorboards-web-app-service created, Deployment.apps/tensorboards-web-app-deployment created, and Virtual service.networking.istio.io/t\nCheck if it is installed correctly.\n```bash\nDeployment "tensorboard-web-app-deployment-6ff79b7f44-qbzmw" created\ndeployment.apps/tensorboard-controller-controller-manager created\n')),(0,o.kt)("p",null,"A custom resource definition for 'tensorboards.tensorboard.kubeflow.org' was created, along with a service account, roles, role bindings, a config map, and a deployment for the controller manager metrics service.\nCheck if the deployment.apps/tensorboard-controller-controller-manager was installed correctly. 
Wait for 1 pod to be Running.\nTranslation: Installing Training Operator."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,o.kt)("p",null,"Check to see if it has been installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,o.kt)("p",null,"Wait until one pod is up and running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,o.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,o.kt)("p",null,"For using Kubeflow, create a Kubeflow Profile for the User to be used."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,o.kt)("p",null,"Confirm that the kubeflow-user-example-com profile has been created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,o.kt)("h2",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"Confirm successful installation by port forwarding to access Kubeflow central dashboard with web browser."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,"Open a web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to confirm that the following screen is displayed.\n",(0,o.kt)("img",{alt:"login-ui",src:a(2396).Z,width:"2554",height:"1202"})),(0,o.kt)("p",null,"Enter the following connection information to connect."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email Address: ",(0,o.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,o.kt)("li",{parentName:"ul"},"Password: 
",(0,o.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"central-dashboard",src:a(5601).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},5601:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},2396:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5520],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function o(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function i(e){for(var t=1;t=0||(o[a]=e[a]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=n.createContext({}),c=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):i(i({},t),e)),a},u=function(e){var t=c(e.components);return n.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,o=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),p=c(a),m=o,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||r;return a?n.createElement(k,i(i({ref:t},u),{},{components:a})):n.createElement(k,i({ref:t},u))}));function k(e,t){var a=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=a.length,i=new Array(r);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,i[1]=l;for(var c=2;c{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>c});var n=a(7462),o=(a(7294),a(3905));const r={title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"setup-components/install-components-kf",id:"setup-components/install-components-kf",title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-components/install-components-kf.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-kf",permalink:"/en/docs/setup-components/install-components-kf",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-components/install-components-kf.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Kubeflow",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},next:{title:"2. 
MLflow Tracking Server",permalink:"/en/docs/setup-components/install-components-mlflow"}},s={},c=[{value:"Prepare the installation file",id:"prepare-the-installation-file",level:2},{value:"Install each components",id:"install-each-components",level:2},{value:"Cert-manager",id:"cert-manager",level:3},{value:"Istio",id:"istio",level:3},{value:"Dex",id:"dex",level:3},{value:"User Namespace",id:"user-namespace",level:3},{value:"Check installation",id:"check-installation",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...r}=e;return(0,o.kt)(p,(0,n.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"prepare-the-installation-file"},"Prepare the installation file"),(0,o.kt)("p",null,"Prepare the installation files for installing Kubeflow ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")),(0,o.kt)("p",null,"Clone the ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/kubeflow/manifests"},"kubeflow/manifests Repository")," with the ",(0,o.kt)("strong",{parentName:"p"},"v1.4.0")," tag, and move to the corresponding folder."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"git clone -b v1.4.0 https://github.com/kubeflow/manifests.git\ncd manifests\n")),(0,o.kt)("h2",{id:"install-each-components"},"Install each components"),(0,o.kt)("p",null,"The kubeflow/manifests repository provides installation commands for each component, but it often lacks information on potential issues that may arise during installation or how to verify if the installation was successful. This can make it challenging for first-time users.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, in this document, we will provide instructions on how to verify the successful installation of each component."),(0,o.kt)("p",null,"Please note that this document will not cover the installation of components that are not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", such as Knative, KFServing, and MPI Operator, as we prioritize efficient resource usage."),(0,o.kt)("h3",{id:"cert-manager"},"Cert-manager"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install cert-manager."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/cert-manager/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/cert-manager created\ncustomresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created\ncustomresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created\nserviceaccount/cert-manager created\nserviceaccount/cert-manager-cainjector created\nserviceaccount/cert-manager-webhook created\nrole.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrole.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrole.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrole.rbac.authorization.k8s.io/cert-manager-cainjector 
created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrole.rbac.authorization.k8s.io/cert-manager-edit created\nclusterrole.rbac.authorization.k8s.io/cert-manager-view created\nclusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created\nrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created\nrolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created\nclusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created\nservice/cert-manager created\nservice/cert-manager-webhook created\ndeployment.apps/cert-manager created\ndeployment.apps/cert-manager-cainjector created\ndeployment.apps/cert-manager-webhook created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 3 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n cert-manager\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncert-manager-7dd5854bb4-7nmpd 1/1 Running 0 2m10s\ncert-manager-cainjector-64c949654c-2scxr 1/1 Running 0 2m10s\ncert-manager-webhook-6b57b9b886-7q6g2 1/1 Running 0 2m10s\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"To install ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer"),", run the following command:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/cert-manager/kubeflow-issuer/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"If the installation is successful, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterissuer.cert-manager.io/kubeflow-self-signing-issuer 
created\n")),(0,o.kt)("p",{parentName:"li"},"Note: If the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager-webhook")," deployment is not in the Running state, you may encounter an error similar to the one below, and the ",(0,o.kt)("inlineCode",{parentName:"p"},"kubeflow-issuer")," may not be installed. In this case, please ensure that all 3 pods of cert-manager are Running before retrying the command.",(0,o.kt)("br",{parentName:"p"}),"\n","If you encounter the below error, make sure that the ",(0,o.kt)("inlineCode",{parentName:"p"},"cert-manager")," deployment and all its pods are running properly before proceeding."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Error from server: error when retrieving current configuration of:\nResource: "cert-manager.io/v1alpha2, Resource=clusterissuers", GroupVersionKind: "cert-manager.io/v1alpha2, Kind=ClusterIssuer"\nName: "kubeflow-self-signing-issuer", Namespace: ""\nfrom server for: "STDIN": conversion webhook for cert-manager.io/v1, Kind=ClusterIssuer failed: Post "https://cert-manager-webhook.cert-manager.svc:443/convert?timeout=30s": dial tcp 10.101.177.157:443: connect: connection refused\n')))),(0,o.kt)("h3",{id:"istio"},"Istio"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install Custom Resource Definition(CRD) for istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-crds/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/authorizationpolicies.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/destinationrules.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/envoyfilters.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/gateways.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/istiooperators.install.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/peerauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/requestauthentications.security.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/serviceentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/sidecars.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/virtualservices.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadentries.networking.istio.io created\ncustomresourcedefinition.apiextensions.k8s.io/workloadgroups.networking.istio.io created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio namespace"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/istio-system created\n"))),(0,o.kt)("li",{parentName:"ol"},(0,o.kt)("p",{parentName:"li"},"Install istio."),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/istio-install/base | kubectl apply 
-f -\n")),(0,o.kt)("p",{parentName:"li"},"if run properly, you should see the following output:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/istio-ingressgateway-service-account created\nserviceaccount/istio-reader-service-account created\nserviceaccount/istiod-service-account created\nrole.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrole.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrole.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrole.rbac.authorization.k8s.io/istiod-istio-system created\nrolebinding.rbac.authorization.k8s.io/istio-ingressgateway-sds created\nrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istio-reader-istio-system created\nclusterrolebinding.rbac.authorization.k8s.io/istiod-istio-system created\nconfigmap/istio created\nconfigmap/istio-sidecar-injector created\nservice/istio-ingressgateway created\nservice/istiod created\ndeployment.apps/istio-ingressgateway created\ndeployment.apps/istiod created\nenvoyfilter.networking.istio.io/metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/stats-filter-1.8 created\nenvoyfilter.networking.istio.io/stats-filter-1.9 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.8 created\nenvoyfilter.networking.istio.io/tcp-metadata-exchange-1.9 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.8 created\nenvoyfilter.networking.istio.io/tcp-stats-filter-1.9 created\nenvoyfilter.networking.istio.io/x-forwarded-host created\ngateway.networking.istio.io/istio-ingressgateway created\nauthorizationpolicy.security.istio.io/global-deny-all created\nauthorizationpolicy.security.istio.io/istio-ingressgateway created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/istio-sidecar-injector created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/istiod-istio-system created\n")),(0,o.kt)("p",{parentName:"li"},"Wait for all 2 pods in the cert-manager namespace to become Running:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system\n")),(0,o.kt)("p",{parentName:"li"},"Once all the pods are Running, you should see output similar to the following:"),(0,o.kt)("pre",{parentName:"li"},(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 16s\nistiod-86457659bb-5h58w 1/1 Running 0 16s\n")))),(0,o.kt)("h3",{id:"dex"},"Dex"),(0,o.kt)("p",null,"Now, let's install dex."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/dex/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows:"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/auth created\ncustomresourcedefinition.apiextensions.k8s.io/authcodes.dex.coreos.com created\nserviceaccount/dex created\nclusterrole.rbac.authorization.k8s.io/dex created\nclusterrolebinding.rbac.authorization.k8s.io/dex created\nconfigmap/dex created\nsecret/dex-oidc-client created\nservice/dex created\ndeployment.apps/dex created\nvirtualservice.networking.istio.io/dex created\n")),(0,o.kt)("p",null,"Wait until all one pod in the auth namespace is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n auth\n")),(0,o.kt)("p",null,"When everyone is running, similar results will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ndex-5ddf47d88d-458cs 1/1 Running 1 12s\n")),(0,o.kt)("p",null,"Install OIDC AuthService."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/oidc-authservice/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be printed as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/oidc-authservice-parameters created\nsecret/oidc-authservice-client created\nservice/authservice created\npersistentvolumeclaim/authservice-pvc created\nstatefulset.apps/authservice created\nenvoyfilter.networking.istio.io/authn-filter created\n")),(0,o.kt)("p",null,"Wait until the authservice-0 pod in the istio-system namespace is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n istio-system -w\n")),(0,o.kt)("p",null,"If everybody runs, a similar result will be printed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nauthservice-0 1/1 Running 0 14s\nistio-ingressgateway-79b665c95-xm22l 1/1 Running 0 2m37s\nistiod-86457659bb-5h58w 1/1 Running 0 2m37s\n")),(0,o.kt)("p",null,"Create a Kubeflow Namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/kubeflow created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get ns kubeflow\n")),(0,o.kt)("p",null,"If generated normally, similar results will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS AGE\nkubeflow Active 8s\n")),(0,o.kt)("p",null,"Install kubeflow-roles."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/kubeflow-roles/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If properly performed, it will output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-kubernetes-view created\nclusterrole.rbac.authorization.k8s.io/kubeflow-view created\n")),(0,o.kt)("p",null,"Retrieve the kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow\n")),(0,o.kt)("p",null,"The following 6 clusterroles will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-admin 2021-12-03T08:51:36Z\nkubeflow-edit 2021-12-03T08:51:36Z\nkubeflow-kubernetes-admin 2021-12-03T08:51:36Z\nkubeflow-kubernetes-edit 
2021-12-03T08:51:36Z\nkubeflow-kubernetes-view 2021-12-03T08:51:36Z\nkubeflow-view 2021-12-03T08:51:36Z\n")),(0,o.kt)("p",null,"Install Kubeflow Istio Resources."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/istio-1-9/kubeflow-istio-resources/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"clusterrole.rbac.authorization.k8s.io/kubeflow-istio-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-istio-view created\ngateway.networking.istio.io/kubeflow-gateway created\n")),(0,o.kt)("p",null,"Retrieve the Kubeflow roles just created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get clusterrole | grep kubeflow-istio\n")),(0,o.kt)("p",null,"The following three clusterroles are output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-istio-admin 2021-12-03T08:53:17Z\nkubeflow-istio-edit 2021-12-03T08:53:17Z\nkubeflow-istio-view 2021-12-03T08:53:17Z\n")),(0,o.kt)("p",null,"Check if the gateway is properly installed in the Kubeflow namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get gateway -n kubeflow\n")),(0,o.kt)("p",null,"If generated normally, a result similar to the following will be output."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME AGE\nkubeflow-gateway 31s\n")),(0,o.kt)("p",null,"Installing Kubeflow Pipelines."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/clusterworkflowtemplates.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/cronworkflows.argoproj.io created\ncustomresourcedefinition.apiextensions.k8s.io/workfloweventbindings.argoproj.io created\n...(\uc0dd\ub7b5)\nauthorizationpolicy.security.istio.io/ml-pipeline-visualizationserver created\nauthorizationpolicy.security.istio.io/mysql created\nauthorizationpolicy.security.istio.io/service-cache-server created\n")),(0,o.kt)("p",null,"This command is installing multiple resources at once, but there are resources with dependencies on the installation order. 
Therefore, depending on the time, a similar error may occur."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"error: unable to recognize "STDIN": no matches for kind "CompositeController" in version "metacontroller.k8s.io/v1alpha1"" \n')),(0,o.kt)("p",null,"If a similar error occurs, wait about 10 seconds and then try the command above again."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/pipeline/upstream/env/platform-agnostic-multi-user | kubectl apply -f -\n")),(0,o.kt)("p",null,"Check to see if it has been installed correctly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow\n")),(0,o.kt)("p",null,"Wait until all 16 pods are running as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\ncache-deployer-deployment-79fdf9c5c9-bjnbg 2/2 Running 1 5m3s\ncache-server-5bdf4f4457-48gbp 2/2 Running 0 5m3s\nkubeflow-pipelines-profile-controller-7b947f4748-8d26b 1/1 Running 0 5m3s\nmetacontroller-0 1/1 Running 0 5m3s\nmetadata-envoy-deployment-5b4856dd5-xtlkd 1/1 Running 0 5m3s\nmetadata-grpc-deployment-6b5685488-kwvv7 2/2 Running 3 5m3s\nmetadata-writer-548bd879bb-zjkcn 2/2 Running 1 5m3s\nminio-5b65df66c9-k5gzg 2/2 Running 0 5m3s\nml-pipeline-8c4b99589-85jw6 2/2 Running 1 5m3s\nml-pipeline-persistenceagent-d6bdc77bd-ssxrv 2/2 Running 0 5m3s\nml-pipeline-scheduledworkflow-5db54d75c5-zk2cw 2/2 Running 0 5m2s\nml-pipeline-ui-5bd8d6dc84-j7wqr 2/2 Running 0 5m2s\nml-pipeline-viewer-crd-68fb5f4d58-mbcbg 2/2 Running 1 5m2s\nml-pipeline-visualizationserver-8476b5c645-wljfm 2/2 Running 0 5m2s\nmysql-f7b9b7dd4-xfnw4 2/2 Running 0 5m2s\nworkflow-controller-5cbbb49bd8-5zrwx 2/2 Running 1 5m2s\n")),(0,o.kt)("p",null,"Additionally, please check if the ml-pipeline UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Open the web browser and connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8888/#/pipelines/"},"http://localhost:8888/#/pipelines/"),". Confirm that the following screen is displayed."),(0,o.kt)("p",null,'If you get the error "Connection refused on localhost", you can access it through the command line by setting the address, as long as there are no security issues. 
To check if the ml-pipeline UI connects normally, open the bind of all addresses with 0.0.0.0.'),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/ml-pipeline-ui -n kubeflow 8888:80\n")),(0,o.kt)("p",null,"Despite running with the above options, if connection refusal issues still occur, add access permission by allowing all TCP protocol ports in the firewall settings or by adding access permission to port 8888."),(0,o.kt)("p",null,"When you open the web browser and access the path ",(0,o.kt)("inlineCode",{parentName:"p"},"http://:8888/#/pipelines/"),", you can see the ml-pipeline UI screen."),(0,o.kt)("p",null,"When accessing the other ports path that is being processed in the bottom, run the command in the same way as above and add the port number to the firewall to run it."),(0,o.kt)("p",null,"English: We will install Katib."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/katib/upstream/installs/katib-with-kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/experiments.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/suggestions.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/trials.kubeflow.org created\nserviceaccount/katib-controller created\nserviceaccount/katib-ui created\nclusterrole.rbac.authorization.k8s.io/katib-controller created\nclusterrole.rbac.authorization.k8s.io/katib-ui created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-katib-view created\nclusterrolebinding.rbac.authorization.k8s.io/katib-controller created\nclusterrolebinding.rbac.authorization.k8s.io/katib-ui created\nconfigmap/katib-config created\nconfigmap/trial-templates created\nsecret/katib-mysql-secrets created\nservice/katib-controller created\nservice/katib-db-manager created\nservice/katib-mysql created\nservice/katib-ui created\npersistentvolumeclaim/katib-mysql created\ndeployment.apps/katib-controller created\ndeployment.apps/katib-db-manager created\ndeployment.apps/katib-mysql created\ndeployment.apps/katib-ui created\ncertificate.cert-manager.io/katib-webhook-cert created\nissuer.cert-manager.io/katib-selfsigned-issuer created\nvirtualservice.networking.istio.io/katib-ui created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\nvalidatingwebhookconfiguration.admissionregistration.k8s.io/katib.kubeflow.org created\n")),(0,o.kt)("p",null,"Confirm if it has been installed properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep katib\n")),(0,o.kt)("p",null,"Wait until four pods are Running, like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"katib-controller-68c47fbf8b-b985z 1/1 Running 0 82s\nkatib-db-manager-6c948b6b76-2d9gr 1/1 Running 0 82s\nkatib-mysql-7894994f88-scs62 1/1 Running 0 82s\nkatib-ui-64bb96d5bf-d89kp 1/1 Running 0 82s\n")),(0,o.kt)("p",null,"Additionally, we will confirm that the Katib UI is connected normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/katib-ui -n kubeflow 
8081:80\n")),(0,o.kt)("p",null,"Open the web browser and access the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8081/katib/"},"http://localhost:8081/katib/")," to confirm the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/centraldashboard/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/centraldashboard created\nrole.rbac.authorization.k8s.io/centraldashboard created\nclusterrole.rbac.authorization.k8s.io/centraldashboard created\nrolebinding.rbac.authorization.k8s.io/centraldashboard created\nclusterrolebinding.rbac.authorization.k8s.io/centraldashboard created\nconfigmap/centraldashboard-config created\nconfigmap/centraldashboard-parameters created\nservice/centraldashboard created\ndeployment.apps/centraldashboard created\nvirtualservice.networking.istio.io/centraldashboard created\n")),(0,o.kt)("p",null,"Check to see if it has been installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep centraldashboard\n")),(0,o.kt)("p",null,"Wait until one pod related to centraldashboard in the kubeflow namespace becomes Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"centraldashboard-8fc7d8cc-xl7ts 1/1 Running 0 52s\n")),(0,o.kt)("p",null,"Additionally, we will check if the Central Dashboard UI is connected properly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/centraldashboard -n kubeflow 8082:80\n")),(0,o.kt)("p",null,"Open the web browser to connect to the path ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8082/"},"http://localhost:8082/")," and check that the following screen is displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/admission-webhook/upstream/overlays/cert-manager | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/poddefaults.kubeflow.org created\nserviceaccount/admission-webhook-service-account created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-cluster-role created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-admin created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-edit created\nclusterrole.rbac.authorization.k8s.io/admission-webhook-kubeflow-poddefaults-view created\nclusterrolebinding.rbac.authorization.k8s.io/admission-webhook-cluster-role-binding created\nservice/admission-webhook-service created\ndeployment.apps/admission-webhook-deployment created\ncertificate.cert-manager.io/admission-webhook-cert created\nissuer.cert-manager.io/admission-webhook-selfsigned-issuer created\nmutatingwebhookconfiguration.admissionregistration.k8s.io/admission-webhook-mutating-webhook-configuration created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep admission-webhook\n")),(0,o.kt)("p",null,"Wait until one pod is 
running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"admission-webhook-deployment-667bd68d94-2hhrx 1/1 Running 0 11s\n")),(0,o.kt)("p",null,"Install the Notebook controller."),(0,o.kt)("p",null,"If done successfully, it will output as follows.\ndeployment.apps/notebook-controller created"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre"},"\nA CustomResourceDefinition.apiextensions.k8s.io/notebooks.kubeflow.org, ServiceAccount/notebook-controller-service-account, Role.rbac.authorization.k8s.io/notebook-controller-leader-election-role, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-admin, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-edit, ClusterRole.rbac.authorization.k8s.io/notebook-controller-kubeflow-notebooks-view, ClusterRole.rbac.authorization.k8s.io/notebook-controller-role, RoleBinding.rbac.authorization.k8s.io/notebook-controller-leader-election-rolebinding, ClusterRoleBinding.rbac.authorization.k8s.io/notebook-controller-role-binding, ConfigMap/notebook-controller-config-m\n\nTranslation: Check if the installation was successful. Wait until one pod is running with the following command: kubectl get po -n kubeflow | grep notebook-controller.\nTranslation: Install Jupyter Web App.\nIf performed correctly, the following will be output.\n")),(0,o.kt)("p",null," Confirm that the installation was successful:\nconfigmap/jupyter-web-app-config-76844k4cd7 created\nconfigmap/jupyter-web-app-logos created\nconfigmap/jupyter-web-app-parameters-chmg88cm48 created\nservice/jupyter-web-app-service created\ndeployment.apps/jupyter-web-app-deployment created\nvirtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created"),(0,o.kt)("p",null,"Wait until one pod is Running."),(0,o.kt)("p",null,"English: We will install the Profile Controller."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be outputted as follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created\nserviceaccount/profiles-controller-service-account created\nrole.rbac.authorization.k8s.io/profiles-leader-election-role created\nrolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created\nclusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created\nconfigmap/namespace-labels-data-48h7kd55mc created\nconfigmap/profiles-config-46c7tgh6fd created\nservice/profiles-kfam created\ndeployment.apps/profiles-deployment created\nvirtualservice.networking.istio.io/profiles-kfam created\n")),(0,o.kt)("p",null,"Check to see if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep profiles-deployment\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"profiles-deployment-89f7d88b-qsnrd 2/2 Running 0 42s\n")),(0,o.kt)("p",null,"Install the Volumes Web App."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -\n")),(0,o.kt)("p",null,"If performed normally, it will be output as 
follows."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"serviceaccount/volumes-web-app-service-account created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created\nclusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created\nclusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created\nconfigmap/volumes-web-app-parameters-4gg8cm2gmk created\nservice/volumes-web-app-service created\ndeployment.apps/volumes-web-app-deployment created\nvirtualservice.networking.istio.io/volumes-web-app-volumes-web-app created\n")),(0,o.kt)("p",null,"Check if it is installed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep volumes-web-app\n")),(0,o.kt)("p",null,"Wait until one pod is running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"volumes-web-app-deployment-8589d664cc-62svl 1/1 Running 0 27s\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Install Tensorboard Web App.\n\nService account/tensorboards-web-app-service-account created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created, Cluster role.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created, Cluster role binding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created, Config map/tensorboards-web-app-parameters-g28fbd6cch created, Service/tensorboards-web-app-service created, Deployment.apps/tensorboards-web-app-deployment created, and Virtual service.networking.istio.io/t\nCheck if it is installed correctly.\n```bash\nDeployment "tensorboard-web-app-deployment-6ff79b7f44-qbzmw" created\ndeployment.apps/tensorboard-controller-controller-manager created\n')),(0,o.kt)("p",null,"A custom resource definition for 'tensorboards.tensorboard.kubeflow.org' was created, along with a service account, roles, role bindings, a config map, and a deployment for the controller manager metrics service.\nCheck if the deployment.apps/tensorboard-controller-controller-manager was installed correctly. 
(0,o.kt)("p",null,"Next, install the Training Operator."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the command runs successfully, the output will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created\ncustomresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created\nserviceaccount/training-operator created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created\nclusterrole.rbac.authorization.k8s.io/kubeflow-training-view created\nclusterrole.rbac.authorization.k8s.io/training-operator created\nclusterrolebinding.rbac.authorization.k8s.io/training-operator created\nservice/training-operator created\ndeployment.apps/training-operator created\n")),(0,o.kt)("p",null,"Check that it was installed correctly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow | grep training-operator\n")),(0,o.kt)("p",null,"Wait until the pod is Running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"training-operator-7d98f9dd88-6887f 1/1 Running 0 28s\n")),(0,o.kt)("h3",{id:"user-namespace"},"User Namespace"),(0,o.kt)("p",null,"To use Kubeflow, create a Kubeflow Profile for the user."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize build common/user-namespace/base | kubectl apply -f -\n")),(0,o.kt)("p",null,"If the command runs successfully, the output will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"configmap/default-install-config-9h2h2b6hbk created\nprofile.kubeflow.org/kubeflow-user-example-com created\n")),(0,o.kt)("p",null,"Confirm that the kubeflow-user-example-com profile has been created."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get profile\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubeflow-user-example-com 37s\n")),(0,o.kt)("h2",{id:"check-installation"},"Check installation"),(0,o.kt)("p",null,"To confirm that the installation succeeded, port-forward the Istio ingress gateway and open the Kubeflow Central Dashboard in a web browser."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,o.kt)("p",null,"Open a web browser and connect to ",(0,o.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to confirm that the following screen is displayed.\n",(0,o.kt)("img",{alt:"login-ui",src:a(2396).Z,width:"2554",height:"1202"})),(0,o.kt)("p",null,"Log in with the following credentials."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Email Address: ",(0,o.kt)("inlineCode",{parentName:"li"},"user@example.com")),(0,o.kt)("li",{parentName:"ul"},"Password: 
",(0,o.kt)("inlineCode",{parentName:"li"},"12341234"))),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"central-dashboard",src:a(5601).Z,width:"4008",height:"1266"})))}d.isMDXComponent=!0},5601:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},2396:(e,t,a)=>{a.d(t,{Z:()=>n});const n=a.p+"assets/images/login-after-install-a3e252f02dc4f4988686d6ae97ddd41f.png"}}]); \ No newline at end of file diff --git a/en/assets/js/81a92311.87cf079a.js b/en/assets/js/81a92311.32de2540.js similarity index 99% rename from en/assets/js/81a92311.87cf079a.js rename to en/assets/js/81a92311.32de2540.js index 38f2a18e..abbac470 100644 --- a/en/assets/js/81a92311.87cf079a.js +++ b/en/assets/js/81a92311.32de2540.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7964],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=t.createContext({}),s=function(e){var n=t.useContext(p),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(p,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var p in n)hasOwnProperty.call(n,p)&&(o[p]=n[p]);o.originalType=e,o[m]="string"==typeof e?e:r,i[1]=o;for(var s=2;s{a.r(n),a.d(n,{assets:()=>p,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>o,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,o={unversionedId:"kubeflow/advanced-mlflow",id:"version-1.0/kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. 
Pipeline - Run Result",permalink:"/en/docs/1.0/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/en/docs/1.0/kubeflow/how-to-debug"}},p={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. Train model",id:"1-train-model",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,"In this page, we will explain the process of writing a component to store the model in MLFlow so that the model trained in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component"},"Advanced Usage Component")," can be linked to API deployment."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"In order to store the model in MLFlow and use it in serving, the following items are needed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"We will look into the process of saving a model to MLFlow through Python code."),(0,r.kt)("h3",{id:"1-train-model"},"1. Train model"),(0,r.kt)("p",null,"The following steps involve training an SVC model using the iris dataset."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. 
MLFLow Infos"),(0,r.kt)("p",null,"This process creates the necessary information for MLFlow."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"Each variable's content is as follows."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. Save MLFLow Infos"),(0,r.kt)("p",null,"Next, we save the learned information and the model. 
Since the trained model uses the sklearn package, we can easily save the model using ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"If you work locally, a svc folder will be created and the following files will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"If you execute the command above, you can check the following output value."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"Each file will be as follows if checked."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"Now, let's proceed with the task of uploading the saved model to the MLflow server."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"Save and open the ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," directory generated path with ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," command to launch mlflow server and dashboard.\nAccess the mlflow dashboard, click the generated run to view it as below."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(9163).Z,width:"2782",height:"2496"}),"\n(This screen may vary 
depending on the version of mlflow.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"Now, let's write a reusable component in Kubeflow."),(0,r.kt)("p",null,"The ways of writing components that can be reused are broadly divided into three categories."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"After saving the necessary environment in the component responsible for model training, the MLflow component is only responsible for the upload."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(8453).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Pass the trained model and data to the MLflow component, which is responsible for saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(5142).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"The component responsible for model training handles both saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(204).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"We are trying to manage the model through the first approach.\nThe reason is that we don't need to write the code to upload the MLFlow model every time like three times for each component written."),(0,r.kt)("p",null,"Reusing components is possible by the methods 1 and 2.\nHowever, in the case of 2, it is necessary to deliver the trained image and packages to the component, so ultimately additional information about the component must be delivered."),(0,r.kt)("p",null,"In order to proceed with the method 1, the learning component must also be changed.\nCode that stores the environment needed to save the model must be added."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"Write a component to upload to MLFlow.\nAt this time, configure the uploaded MLFlow endpoint to be connected to the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-mlflow"},"mlflow service")," that we 
installed.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, use the Kubernetes Service DNS Name of the Minio installed at the time of MLFlow Server installation. As this service is created in the Kubeflow namespace with the name minio-service, set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000"),".",(0,r.kt)("br",{parentName:"p"}),"\n","Similarly, for the tracking_uri address, use the Kubernetes Service DNS Name of the MLFlow server and set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"Now let's connect the components we have written and create a pipeline. 
"),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"The data we will use to train the model is sklearn's iris.\nWe will write a component to generate the data."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"The pipeline code can be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"If you organize the components and pipelines written above into a single Python file, it would look like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, 
file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import 
pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n 
template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", 
dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, 
required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n 
dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": "{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"After generating the mlflow_pipeline.yaml file after execution, upload the pipeline and execute it to check the results of the run."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(5327).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"Port-forward the mlflow service to access the MLflow UI."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"Open the web browser and connect to localhost:5000. You will then be able to see that the run has been created as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(2273).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"Click on run to verify that the trained model file is present."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(375).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},9163:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},8453:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},5142:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},204:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},5327:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},2273:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},375:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7964],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=t.createContext({}),s=function(e){var n=t.useContext(p),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var 
n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(p,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var p in n)hasOwnProperty.call(n,p)&&(o[p]=n[p]);o.originalType=e,o[m]="string"==typeof e?e:r,i[1]=o;for(var s=2;s{a.r(n),a.d(n,{assets:()=>p,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>o,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,o={unversionedId:"kubeflow/advanced-mlflow",id:"version-1.0/kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/en/docs/1.0/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/en/docs/1.0/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/en/docs/1.0/kubeflow/how-to-debug"}},p={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. Train model",id:"1-train-model",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,"In this page, we will explain the process of writing a component to store the model in MLFlow so that the model trained in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-component"},"Advanced Usage Component")," can be linked to API deployment."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"In order to store the model in MLFlow and use it in serving, the following items are needed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"We will look into the process of saving a model to MLFlow through Python code."),(0,r.kt)("h3",{id:"1-train-model"},"1. 
Train model"),(0,r.kt)("p",null,"The following steps involve training an SVC model using the iris dataset."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. MLFLow Infos"),(0,r.kt)("p",null,"This process creates the necessary information for MLFlow."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"Each variable's content is as follows."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. Save MLFLow Infos"),(0,r.kt)("p",null,"Next, we save the learned information and the model. 
Since the trained model uses the sklearn package, we can easily save the model using ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"If you work locally, a svc folder will be created and the following files will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"If you execute the command above, you can check the following output value."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"Each file will be as follows if checked."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"Now, let's proceed with the task of uploading the saved model to the MLflow server."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"Save and open the ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," directory generated path with ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," command to launch mlflow server and dashboard.\nAccess the mlflow dashboard, click the generated run to view it as below."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(9163).Z,width:"2782",height:"2496"}),"\n(This screen may vary 
depending on the version of mlflow.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"Now, let's write a reusable component in Kubeflow."),(0,r.kt)("p",null,"The ways of writing components that can be reused are broadly divided into three categories."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"After saving the necessary environment in the component responsible for model training, the MLflow component is only responsible for the upload."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(8453).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Pass the trained model and data to the MLflow component, which is responsible for saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(5142).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"The component responsible for model training handles both saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(204).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"We are trying to manage the model through the first approach.\nThe reason is that we don't need to write the code to upload the MLFlow model every time like three times for each component written."),(0,r.kt)("p",null,"Reusing components is possible by the methods 1 and 2.\nHowever, in the case of 2, it is necessary to deliver the trained image and packages to the component, so ultimately additional information about the component must be delivered."),(0,r.kt)("p",null,"In order to proceed with the method 1, the learning component must also be changed.\nCode that stores the environment needed to save the model must be added."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"Write a component to upload to MLFlow.\nAt this time, configure the uploaded MLFlow endpoint to be connected to the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-mlflow"},"mlflow service")," that we 
installed.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, use the Kubernetes Service DNS Name of the Minio installed at the time of MLFlow Server installation. As this service is created in the Kubeflow namespace with the name minio-service, set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000"),".",(0,r.kt)("br",{parentName:"p"}),"\n","Similarly, for the tracking_uri address, use the Kubernetes Service DNS Name of the MLFlow server and set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"Now let's connect the components we have written and create a pipeline. 
"),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"The data we will use to train the model is sklearn's iris.\nWe will write a component to generate the data."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"The pipeline code can be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"If you organize the components and pipelines written above into a single Python file, it would look like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, 
file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import 
pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n 
template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", 
dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, 
required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n 
dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": "{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"After generating the mlflow_pipeline.yaml file after execution, upload the pipeline and execute it to check the results of the run."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(5327).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"Port-forward the mlflow service to access the MLflow UI."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"Open the web browser and connect to localhost:5000. You will then be able to see that the run has been created as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(2273).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"Click on run to verify that the trained model file is present."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(375).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},9163:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},8453:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},5142:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},204:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},5327:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},2273:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},375:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file diff --git a/en/assets/js/81e9ac91.74384f64.js b/en/assets/js/81e9ac91.0503ee6e.js similarity index 98% rename from en/assets/js/81e9ac91.74384f64.js rename to en/assets/js/81e9ac91.0503ee6e.js index c8586f50..31cbcbf0 100644 --- a/en/assets/js/81e9ac91.74384f64.js +++ b/en/assets/js/81e9ac91.0503ee6e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9699],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,a=e.originalType,i=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=o(n),k=s,f=p["".concat(i,".").concat(k)]||p[k]||d[k]||a;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var a=n.length,l=new Array(a);l[0]=k;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[p]="string"==typeof e?e:s,l[1]=u;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>u,toc:()=>o});var r=n(7462),s=(n(7294),n(3905));const a={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:o},p="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"1-prerequisite"},"1. 
Prerequisite"),(0,s.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster"),"."),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,s.kt)("p",null,"k3s uses containerd as the backend by default.\nHowever, we need to use docker as the backend to use GPU, so we will install the backend with the ",(0,s.kt)("inlineCode",{parentName:"p"},"--docker")," option."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,s.kt)("p",null,"After installing k3s, check the k3s config."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,s.kt)("p",null,"If installed correctly, the following items will be output. (Security related keys are hidden with <...>.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,s.kt)("p",null,"Set up the Kubernetes cluster by copying the k3s config to be used as the cluster\u2019s kubeconfig."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,s.kt)("p",null,"Grant user access permission to the copied config file."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,s.kt)("h2",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,s.kt)("p",null,"Now move the kubeconfig configured in the cluster to the local.\nSet the path to ",(0,s.kt)("inlineCode",{parentName:"p"},"~/.kube/config")," on the local."),(0,s.kt)("p",null,"The config file copied at first has the server ip set to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443"),".\nModify this value to match the ip of the cluster.\n(We modified it to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," to match the ip of the cluster used in this page.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. 
Install Kubernetes Default Modules"),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"helm"),(0,s.kt)("li",{parentName:"ul"},"kustomize"),(0,s.kt)("li",{parentName:"ul"},"CSI plugin"),(0,s.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,s.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,s.kt)("p",null,"Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,s.kt)("p",null,"If you see the following message, it means that the installation was successful."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,s.kt)("h2",{id:"6-references"},"6. References"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9699],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var i=r.createContext({}),o=function(e){var t=r.useContext(i),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=o(e.components);return r.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,a=e.originalType,i=e.parentName,c=u(e,["components","mdxType","originalType","parentName"]),p=o(n),k=s,f=p["".concat(i,".").concat(k)]||p[k]||d[k]||a;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var a=n.length,l=new Array(a);l[0]=k;var u={};for(var i in t)hasOwnProperty.call(t,i)&&(u[i]=t[i]);u.originalType=e,u[p]="string"==typeof e?e:s,l[1]=u;for(var o=2;o{n.r(t),n.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>u,toc:()=>o});var r=n(7462),s=(n(7294),n(3905));const a={title:"4.1. 
K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,u={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},i={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:o},p="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,s.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster"),"."),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,s.kt)("p",null,"k3s uses containerd as the backend by default.\nHowever, we need to use docker as the backend to use GPU, so we will install the backend with the ",(0,s.kt)("inlineCode",{parentName:"p"},"--docker")," option."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,s.kt)("p",null,"After installing k3s, check the k3s config."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,s.kt)("p",null,"If installed correctly, the following items will be output. 
(Security related keys are hidden with <...>.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,s.kt)("p",null,"Set up the Kubernetes cluster by copying the k3s config to be used as the cluster\u2019s kubeconfig."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,s.kt)("p",null,"Grant user access permission to the copied config file."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,s.kt)("h2",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,s.kt)("p",null,"Now move the kubeconfig configured in the cluster to the local.\nSet the path to ",(0,s.kt)("inlineCode",{parentName:"p"},"~/.kube/config")," on the local."),(0,s.kt)("p",null,"The config file copied at first has the server ip set to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443"),".\nModify this value to match the ip of the cluster.\n(We modified it to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," to match the ip of the cluster used in this page.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. Install Kubernetes Default Modules"),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"helm"),(0,s.kt)("li",{parentName:"ul"},"kustomize"),(0,s.kt)("li",{parentName:"ul"},"CSI plugin"),(0,s.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,s.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,s.kt)("p",null,"Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,s.kt)("p",null,"If you see the following message, it means that the installation was successful."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,s.kt)("h2",{id:"6-references"},"6. 
References"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/82f8e163.96f11516.js b/en/assets/js/82f8e163.8f9802f1.js similarity index 99% rename from en/assets/js/82f8e163.96f11516.js rename to en/assets/js/82f8e163.8f9802f1.js index e5598db7..df1ddae9 100644 --- a/en/assets/js/82f8e163.96f11516.js +++ b/en/assets/js/82f8e163.8f9802f1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5430],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>h});var o=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function l(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=o.createContext({}),d=function(e){var n=o.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):l(l({},n),e)),t},m=function(e){var n=d(e.components);return o.createElement(s.Provider,{value:n},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},c=o.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),p=d(t),c=a,h=p["".concat(s,".").concat(c)]||p[c]||u[c]||r;return t?o.createElement(h,l(l({ref:n},m),{},{components:t})):o.createElement(h,l({ref:n},m))}));function h(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,l=new Array(r);l[0]=c;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:a,l[1]=i;for(var d=2;d{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>i,toc:()=>d});var o=t(7462),a=(t(7294),t(3905));const r={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/en/docs/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-fields.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/en/docs/api-deployment/seldon-pg"},next:{title:"5. 
Model from MLflow",permalink:"/en/docs/api-deployment/seldon-mlflow"}},s={},d=[{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:d},p="wrapper";function u(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,o.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Summary of how Seldon Core creates an API server:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"initContainer downloads the required model from the model repository."),(0,a.kt)("li",{parentName:"ol"},"The downloaded model is passed to the container."),(0,a.kt)("li",{parentName:"ol"},"The container runs an API server enclosing the model."),(0,a.kt)("li",{parentName:"ol"},"The API can be requested at the generated API server address to receive the inference values from the model.")),(0,a.kt)("p",null,"The yaml file defining the custom resource, SeldonDeployment, which is most commonly used when using Seldon Core is as follows:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," fields of SeldonDeployment are required fields. ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect. ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," must be a single array consisting of ",(0,a.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph")," defined. 
Here also, ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect."),(0,a.kt)("p",null,"Now let's take a look at the fields that need to be defined in ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph"),"."),(0,a.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," must be a single array consisting of the ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," key. The ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," must have the fields ",(0,a.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"initContainers")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"containers")," defined."),(0,a.kt)("h3",{id:"volumes"},"volumes"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"Volumes")," refer to the space used to store the models downloaded from the initContainer, which is received as an array with the components ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"emptyDir"),". These values are used only once when downloading and moving the models, so they do not need to be modified significantly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"args")," field contains the system arguments necessary to download the model from the model repository and move it to the specified model path. It provides the required parameters for the initContainer to perform the downloading and storage operations."),(0,a.kt)("p",null,"initContainer is responsible for downloading the model to be used from the API, so the fields used determine the information needed to download data from the model registry. "),(0,a.kt)("p",null,"The value of initContainer consists of n arrays, and each model needs to be specified separately."),(0,a.kt)("h4",{id:"name"},"name"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"name")," is the name of the pod in Kubernetes, and it is recommended to use ",(0,a.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," for debugging. 
"),(0,a.kt)("h4",{id:"image"},"image"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"image")," is the name of the image used to download the model, and there are two recommended images by"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,a.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,a.kt)("p",null,"For more detailed information, please refer to the following resources:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,a.kt)("p",null,"In MLOps for ALL, we use kfserving for downloading and storing models."),(0,a.kt)("h4",{id:"args"},"args"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,a.kt)("p",null,"When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image is run (",(0,a.kt)("inlineCode",{parentName:"p"},"run"),"), it takes an argument in the form of an array. The first array value is the address of the model to be downloaded. The second array value is the address where the downloaded model will be stored (Seldon Core usually stores it in ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models"),")."),(0,a.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"volumeMounts")," is a field that attaches volumes to the Kubernetes to share ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models")," as described in volumes. For more information, refer to Kubernetes Volume ",(0,a.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"Kubernetes Volume"),'."'),(0,a.kt)("h3",{id:"container"},"container"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null," Container defines the fields that determine the configuration when the model is run in an API form."),(0,a.kt)("h4",{id:"name-1"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the pod in Kubernetes. It should be the name of the model being used."),(0,a.kt)("h4",{id:"image-1"},"image"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"image")," field represents the image used to convert the model into an API. 
The image should have all the necessary packages installed when the model is loaded."),(0,a.kt)("p",null,"Seldon Core provides official images for different types of models, including:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,a.kt)("p",null,"You can choose the appropriate image based on the type of model you are using."),(0,a.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,a.kt)("p",null,"This is a field that tells the path where the data downloaded from initContainer is located. Here, to prevent the model from being modified, ",(0,a.kt)("inlineCode",{parentName:"p"},"readOnly: true")," will also be given."),(0,a.kt)("h4",{id:"securitycontext"},"securityContext"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null,"When installing necessary packages, pod may not be able to perform the package installation due to lack of permission. To address this, root permission is granted (although this could cause security issues when in actual service)."),(0,a.kt)("h2",{id:"graph"},"graph"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"This is a field that defines the order in which the model operates."),(0,a.kt)("h3",{id:"name-2"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the model graph. It should match the name defined in the container."),(0,a.kt)("h3",{id:"type"},"type"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"type")," field can have four different values:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"MODEL"),(0,a.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"ROUTER")),(0,a.kt)("p",null,"For detailed explanations of each type, you can refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"."),(0,a.kt)("h3",{id:"parameters"},"parameters"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"parameters")," field contains values used in the class init. 
For the sklearnserver, you can find the required values in the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"following file"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,a.kt)("p",null,"If you look at the code, you can define ",(0,a.kt)("inlineCode",{parentName:"p"},"model_uri")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"method"),"."),(0,a.kt)("h3",{id:"children"},"children"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"children")," field is used when creating the sequence diagram. More details about this field will be explained on the following page."))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5430],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>h});var o=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function l(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=o.createContext({}),d=function(e){var n=o.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):l(l({},n),e)),t},m=function(e){var n=d(e.components);return o.createElement(s.Provider,{value:n},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},c=o.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),p=d(t),c=a,h=p["".concat(s,".").concat(c)]||p[c]||u[c]||r;return t?o.createElement(h,l(l({ref:n},m),{},{components:t})):o.createElement(h,l({ref:n},m))}));function h(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,l=new Array(r);l[0]=c;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:a,l[1]=i;for(var d=2;d{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>i,toc:()=>d});var o=t(7462),a=(t(7294),t(3905));const r={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/en/docs/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-fields.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/en/docs/api-deployment/seldon-pg"},next:{title:"5. 
Model from MLflow",permalink:"/en/docs/api-deployment/seldon-mlflow"}},s={},d=[{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:d},p="wrapper";function u(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,o.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Summary of how Seldon Core creates an API server:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"initContainer downloads the required model from the model repository."),(0,a.kt)("li",{parentName:"ol"},"The downloaded model is passed to the container."),(0,a.kt)("li",{parentName:"ol"},"The container runs an API server enclosing the model."),(0,a.kt)("li",{parentName:"ol"},"The API can be requested at the generated API server address to receive the inference values from the model.")),(0,a.kt)("p",null,"The yaml file defining the custom resource, SeldonDeployment, which is most commonly used when using Seldon Core is as follows:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," fields of SeldonDeployment are required fields. ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect. ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," must be a single array consisting of ",(0,a.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph")," defined. 
Here also, ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect."),(0,a.kt)("p",null,"Now let's take a look at the fields that need to be defined in ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph"),"."),(0,a.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," must be a single array consisting of the ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," key. The ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," must have the fields ",(0,a.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"initContainers")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"containers")," defined."),(0,a.kt)("h3",{id:"volumes"},"volumes"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"Volumes")," refer to the space used to store the models downloaded from the initContainer, which is received as an array with the components ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"emptyDir"),". These values are used only once when downloading and moving the models, so they do not need to be modified significantly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"args")," field contains the system arguments necessary to download the model from the model repository and move it to the specified model path. It provides the required parameters for the initContainer to perform the downloading and storage operations."),(0,a.kt)("p",null,"initContainer is responsible for downloading the model to be used from the API, so the fields used determine the information needed to download data from the model registry. "),(0,a.kt)("p",null,"The value of initContainer consists of n arrays, and each model needs to be specified separately."),(0,a.kt)("h4",{id:"name"},"name"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"name")," is the name of the pod in Kubernetes, and it is recommended to use ",(0,a.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," for debugging. 
"),(0,a.kt)("h4",{id:"image"},"image"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"image")," is the name of the image used to download the model, and there are two recommended images by"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,a.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,a.kt)("p",null,"For more detailed information, please refer to the following resources:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,a.kt)("p",null,"In MLOps for ALL, we use kfserving for downloading and storing models."),(0,a.kt)("h4",{id:"args"},"args"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,a.kt)("p",null,"When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image is run (",(0,a.kt)("inlineCode",{parentName:"p"},"run"),"), it takes an argument in the form of an array. The first array value is the address of the model to be downloaded. The second array value is the address where the downloaded model will be stored (Seldon Core usually stores it in ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models"),")."),(0,a.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"volumeMounts")," is a field that attaches volumes to the Kubernetes to share ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models")," as described in volumes. For more information, refer to Kubernetes Volume ",(0,a.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"Kubernetes Volume"),'."'),(0,a.kt)("h3",{id:"container"},"container"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null," Container defines the fields that determine the configuration when the model is run in an API form."),(0,a.kt)("h4",{id:"name-1"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the pod in Kubernetes. It should be the name of the model being used."),(0,a.kt)("h4",{id:"image-1"},"image"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"image")," field represents the image used to convert the model into an API. 
The image should have all the necessary packages installed when the model is loaded."),(0,a.kt)("p",null,"Seldon Core provides official images for different types of models, including:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,a.kt)("p",null,"You can choose the appropriate image based on the type of model you are using."),(0,a.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,a.kt)("p",null,"This is a field that tells the path where the data downloaded from initContainer is located. Here, to prevent the model from being modified, ",(0,a.kt)("inlineCode",{parentName:"p"},"readOnly: true")," will also be given."),(0,a.kt)("h4",{id:"securitycontext"},"securityContext"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null,"When installing necessary packages, pod may not be able to perform the package installation due to lack of permission. To address this, root permission is granted (although this could cause security issues when in actual service)."),(0,a.kt)("h2",{id:"graph"},"graph"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"This is a field that defines the order in which the model operates."),(0,a.kt)("h3",{id:"name-2"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the model graph. It should match the name defined in the container."),(0,a.kt)("h3",{id:"type"},"type"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"type")," field can have four different values:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"MODEL"),(0,a.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"ROUTER")),(0,a.kt)("p",null,"For detailed explanations of each type, you can refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"."),(0,a.kt)("h3",{id:"parameters"},"parameters"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"parameters")," field contains values used in the class init. 
For the sklearnserver, you can find the required values in the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"following file"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,a.kt)("p",null,"If you look at the code, you can define ",(0,a.kt)("inlineCode",{parentName:"p"},"model_uri")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"method"),"."),(0,a.kt)("h3",{id:"children"},"children"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"children")," field is used when creating the sequence diagram. More details about this field will be explained on the following page."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/85e11584.6de621bb.js b/en/assets/js/85e11584.f41a5ff3.js similarity index 99% rename from en/assets/js/85e11584.6de621bb.js rename to en/assets/js/85e11584.f41a5ff3.js index 99e54a46..3ac62487 100644 --- a/en/assets/js/85e11584.6de621bb.js +++ b/en/assets/js/85e11584.f41a5ff3.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7966],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>_});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var l=a.createContext({}),u=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},s=function(e){var n=u(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,p=e.originalType,l=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),m=u(t),c=i,_=m["".concat(l,".").concat(c)]||m[c]||d[c]||p;return t?a.createElement(_,r(r({ref:n},s),{},{components:t})):a.createElement(_,r({ref:n},s))}));function _(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var p=t.length,r=new Array(p);r[0]=c;var o={};for(var l in n)hasOwnProperty.call(n,l)&&(o[l]=n[l]);o.originalType=e,o[m]="string"==typeof e?e:i,r[1]=o;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>r,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>u});var a=t(7462),i=(t(7294),t(3905));const p={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},r=void 0,o={unversionedId:"kubeflow/advanced-run",id:"kubeflow/advanced-run",title:"11. 
Pipeline - Run Result",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/en/docs/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/en/docs/kubeflow/advanced-pipeline"},next:{title:"12. Component - MLFlow",permalink:"/en/docs/kubeflow/advanced-mlflow"}},l={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],s={toc:u},m="wrapper";function d(e){let{components:n,...p}=e;return(0,i.kt)(m,(0,a.Z)({},s,p,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"Click Run Result and you will see three tabs:\nGraph, Run Output, and Config."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-0.png",src:t(9216).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"graph"},"Graph"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-1.png",src:t(388).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the graph, if you click on the run component, you can check the running information of the component."),(0,i.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,i.kt)("p",null,"The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components."),(0,i.kt)("h3",{id:"logs"},"Logs"),(0,i.kt)("p",null,"In the Logs tab, you can view all the stdout output generated during the execution of the Python code.\nHowever, pods are deleted after a certain period of time, so you may not be able to view them in this tab after a certain time.\nIn that case, you can check them in the main-logs section of the Output artifacts."),(0,i.kt)("h3",{id:"visualizations"},"Visualizations"),(0,i.kt)("p",null,"The Visualizations tab displays plots generated by the components."),(0,i.kt)("p",null,"To generate a plot, you can save the desired values as an argument using ",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")'),". 
The plot should be in HTML format.\nThe conversion process is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,i.kt)("p",null,"If written in pipeline, it will be like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,i.kt)("p",null,"If you run this script and check the resulting ",(0,i.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),", you will see the following."),(0,i.kt)("p",null,(0,i.kt)("details",null,(0,i.kt)("summary",null,"plot_pipeline.yaml"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = 
base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,i.kt)("p",null,"After running, click Visualization."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-5.png",src:t(4321).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-output"},"Run output"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-2.png",src:t(4312).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Run output is where Kubeflow gathers the Artifacts generated in the specified form and shows the evaluation index (Metric)."),(0,i.kt)("p",null,"To show the evaluation index (Metric), you can save the name and value you want to show in the 
",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument in json format. For example, you can write it like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,i.kt)("p",null,"We will add a component to generate evaluation metrics to the pipeline created in the ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-pipeline"},"Pipeline")," and execute it. The whole pipeline is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,i.kt)("p",null,"After execution, click Run Output and it will show like this."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-4.png",src:t(6856).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"config"},"Config"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-3.png",src:t(4493).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the Config tab, you can view all the values received as pipeline configurations."))}d.isMDXComponent=!0},9216:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},388:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},4312:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},4493:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},6856:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},4321:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7966],{3905:(e,n,t)=>{t.d(n,{Zo:()=>s,kt:()=>_});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var l=a.createContext({}),u=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},s=function(e){var n=u(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,p=e.originalType,l=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),m=u(t),c=i,_=m["".concat(l,".").concat(c)]||m[c]||d[c]||p;return t?a.createElement(_,r(r({ref:n},s),{},{components:t})):a.createElement(_,r({ref:n},s))}));function _(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var p=t.length,r=new Array(p);r[0]=c;var o={};for(var l in n)hasOwnProperty.call(n,l)&&(o[l]=n[l]);o.originalType=e,o[m]="string"==typeof e?e:i,r[1]=o;for(var u=2;u{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>r,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>u});var a=t(7462),i=(t(7294),t(3905));const p={title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},r=void 0,o={unversionedId:"kubeflow/advanced-run",id:"kubeflow/advanced-run",title:"11. Pipeline - Run Result",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-run",permalink:"/en/docs/kubeflow/advanced-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-run.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:11,frontMatter:{title:"11. Pipeline - Run Result",description:"",sidebar_position:11,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"10. Pipeline - Setting",permalink:"/en/docs/kubeflow/advanced-pipeline"},next:{title:"12. 
Component - MLFlow",permalink:"/en/docs/kubeflow/advanced-mlflow"}},l={},u=[{value:"Run Result",id:"run-result",level:2},{value:"Graph",id:"graph",level:2},{value:"Input/Output",id:"inputoutput",level:3},{value:"Logs",id:"logs",level:3},{value:"Visualizations",id:"visualizations",level:3},{value:"Run output",id:"run-output",level:2},{value:"Config",id:"config",level:2}],s={toc:u},m="wrapper";function d(e){let{components:n,...p}=e;return(0,i.kt)(m,(0,a.Z)({},s,p,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"Click Run Result and you will see three tabs:\nGraph, Run Output, and Config."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-0.png",src:t(9216).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"graph"},"Graph"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-1.png",src:t(388).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the graph, if you click on the run component, you can check the running information of the component."),(0,i.kt)("h3",{id:"inputoutput"},"Input/Output"),(0,i.kt)("p",null,"The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components."),(0,i.kt)("h3",{id:"logs"},"Logs"),(0,i.kt)("p",null,"In the Logs tab, you can view all the stdout output generated during the execution of the Python code.\nHowever, pods are deleted after a certain period of time, so you may not be able to view them in this tab after a certain time.\nIn that case, you can check them in the main-logs section of the Output artifacts."),(0,i.kt)("h3",{id:"visualizations"},"Visualizations"),(0,i.kt)("p",null,"The Visualizations tab displays plots generated by the components."),(0,i.kt)("p",null,"To generate a plot, you can save the desired values as an argument using ",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_ui_metadata: OutputPath("UI_Metadata")'),". 
The plot should be in HTML format.\nThe conversion process is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(\n mlpipeline_ui_metadata: OutputPath("UI_Metadata")\n):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot(x=[1, 2, 3], y=[1, 2,3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n')),(0,i.kt)("p",null,"If written in pipeline, it will be like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["matplotlib"],\n)\ndef plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):\n import base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n\n@pipeline(name="plot_pipeline")\ndef plot_pipeline():\n plot_linear()\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")\n')),(0,i.kt)("p",null,"If you run this script and check the resulting ",(0,i.kt)("inlineCode",{parentName:"p"},"plot_pipeline.yaml"),", you will see the following."),(0,i.kt)("p",null,(0,i.kt)("details",null,(0,i.kt)("summary",null,"plot_pipeline.yaml"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: plot-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2\n022-01-17T13:31:32.963214\',\n pipelines.kubeflow.org/pipeline_spec: \'{"name": "plot_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: plot-pipeline\n templates:\n - name: plot-linear\n container:\n args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'matplotlib\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet\n --no-warn-script-location \'matplotlib\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n def plot_linear(mlpipeline_ui_metadata):\n import base64\n import json\n from io import BytesIO\n import matplotlib.pyplot as plt\n plt.plot([1, 2, 3], [1, 2, 3])\n tmpfile = BytesIO()\n plt.savefig(tmpfile, format="png")\n encoded = 
base64.b64encode(tmpfile.getvalue()).decode("utf-8")\n html = f""\n metadata = {\n "outputs": [\n {\n "type": "web-app",\n "storage": "inline",\n "source": html,\n },\n ],\n }\n with open(mlpipeline_ui_metadata, "w") as html_writer:\n json.dump(metadata, html_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Plot linear\', description=\'\')\n _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n _outputs = plot_linear(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'matplotlib\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'matplotlib\'\'\n --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef plot_linear(mlpipeline_ui_metadata):\\n import\n base64\\n import json\\n from io import BytesIO\\n\\n import matplotlib.pyplot\n as plt\\n\\n plt.plot([1, 2, 3], [1, 2, 3])\\n\\n tmpfile = BytesIO()\\n plt.savefig(tmpfile,\n format=\\"png\\")\\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\\"utf-8\\")\\n\\n html\n = f\\"\\"\\n metadata = {\\n \\"outputs\\":\n [\\n {\\n \\"type\\": \\"web-app\\",\\n \\"storage\\":\n \\"inline\\",\\n \\"source\\": html,\\n },\\n ],\\n }\\n with\n open(mlpipeline_ui_metadata, \\"w\\") as html_writer:\\n json.dump(metadata,\n html_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Plot\n linear\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--mlpipeline-ui-metadata\\",\n dest=\\"mlpipeline_ui_metadata\\", type=_make_parent_dirs_and_return_path,\n required=True, default=argparse.SUPPRESS)\\n_parsed_args = vars(_parser.parse_args())\\n\\n_outputs\n = plot_linear(**_parsed_args)\\n"], "image": "python:3.7"}}, "name": "Plot\n linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}\',\n pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: plot-pipeline\n dag:\n tasks:\n - {name: plot-linear, template: plot-linear}\n arguments:\n parameters: []\n serviceAccountName: pipeline-runner\n')))),(0,i.kt)("p",null,"After running, click Visualization."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-5.png",src:t(4321).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-output"},"Run output"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-2.png",src:t(4312).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Run output is where Kubeflow gathers the Artifacts generated in the specified form and shows the evaluation index (Metric)."),(0,i.kt)("p",null,"To show the evaluation index (Metric), you can save the name and value you want to show in the 
",(0,i.kt)("inlineCode",{parentName:"p"},'mlpipeline_metrics_path: OutputPath("Metrics")')," argument in json format. For example, you can write it like this."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n')),(0,i.kt)("p",null,"We will add a component to generate evaluation metrics to the pipeline created in the ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-pipeline"},"Pipeline")," and execute it. The whole pipeline is as follows."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func, OutputPath\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int) -> int:\n sum_number = number_1 + number_2\n print(sum_number)\n return sum_number\n\n@create_component_from_func\ndef show_metric_of_sum(\n number: int,\n mlpipeline_metrics_path: OutputPath("Metrics"),\n ):\n import json\n metrics = {\n "metrics": [\n {\n "name": "sum_value",\n "numberValue": number,\n },\n ],\n }\n with open(mlpipeline_metrics_path, "w") as f:\n json.dump(metrics, f)\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1)\n number_2_result = print_and_return_number(number_2)\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n )\n show_metric_of_sum(sum_result.output)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,i.kt)("p",null,"After execution, click Run Output and it will show like this."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-4.png",src:t(6856).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"config"},"Config"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"advanced-run-3.png",src:t(4493).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"In the Config tab, you can view all the values received as pipeline configurations."))}d.isMDXComponent=!0},9216:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-0-adc975b65f29dee20a2bf33c969773d5.png"},388:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-1-cfdbe4b3c9d101eecde409c9baf10dbb.png"},4312:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-2-2b0de3bdf8fa16c0e318d2dffda1f9f8.png"},4493:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-3-13783474cf32a499f90a11fc84575eea.png"},6856:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-4-3bfbf40826566f37cb8512a2e2889038.png"},4321:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/advanced-run-5-8de88b76e09f491c9a7c86642a12fbd9.png"}}]); \ No newline at end of file diff --git a/en/assets/js/8657d6b7.96da70e6.js b/en/assets/js/8657d6b7.192566b4.js similarity index 99% rename from en/assets/js/8657d6b7.96da70e6.js rename to en/assets/js/8657d6b7.192566b4.js index 049f61d3..4be471f6 100644 --- a/en/assets/js/8657d6b7.96da70e6.js +++ b/en/assets/js/8657d6b7.192566b4.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6680],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),s=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=s(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,l=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(t),_=r,u=m["".concat(l,".").concat(_)]||m[_]||c[_]||i;return t?a.createElement(u,o(o({ref:n},d),{},{components:t})):a.createElement(u,o({ref:n},d))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=_;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>p,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const i={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"kubeflow/advanced-environment",id:"kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/en/docs/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-environment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:9,frontMatter:{title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/en/docs/kubeflow/advanced-pipeline"}},l={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Adding packages",id:"adding-packages",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],d={toc:s},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"When we run the pipeline written in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),", it fails. Let's find out why it fails and modify it so that it can run properly. 
"),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Let's convert the component written ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component#convert-to-kubeflow-format"},"earlier")," into a yaml file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you run the script above, you will get a ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file like the one below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"According to the content explained in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component")," previously mentioned, this component will be executed as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"However, when running the component created above, an error will occur.",(0,r.kt)("br",{parentName:"p"}),"\n","The reason is in the way the component wrapper is executed.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow uses Kubernetes, so the component 
wrapper runs the component content on its own separate container."),(0,r.kt)("p",null,"In detail, the image specified in the generated ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7"),"."),(0,r.kt)("p",null,"There may be some people who notice why it is not running for some reason."),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," image does not have the packages we want to use, such as ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", and ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn"),", installed.",(0,r.kt)("br",{parentName:"p"}),"\n","Therefore, when executing, it fails with an error indicating that the packages are not found."),(0,r.kt)("p",null,"So, how can we add the packages?"),(0,r.kt)("h2",{id:"adding-packages"},"Adding packages"),(0,r.kt)("p",null,"During the process of converting Kubeflow, there are two ways to add packages:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"base_image")),(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install"))),(0,r.kt)("p",null,"Let's check what arguments the function ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," used to compile the components can receive."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": Function that creates the component wrapper to be made into a component."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": Image that the component wrapper will run on."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": Additional packages that need to be installed for the component to use.")),(0,r.kt)("h3",{id:"1-base_image"},"1. base_image"),(0,r.kt)("p",null,"Take a closer look at the sequence in which the component is executed and it will be as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"If the base_image used by the component already has all the packages installed, you can use it without installing additional packages."),(0,r.kt)("p",null,"For example, on this page we are going to write a Dockerfile like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"Let's build the image using the Dockerfile above. The Docker hub we will use for the practice is ghcr.",(0,r.kt)("br",{parentName:"p"}),"\n","You can choose a Docker hub according to your environment and upload it."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . 
-f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"Now let's try inputting the base image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you compile the generated component, it will appear as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"We can confirm that the base_image has been changed to the value we have set."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. 
packages_to_install"),(0,r.kt)("p",null,"However, when packages are added, it takes a lot of time to create a new Docker image.\nIn this case, we can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument to easily add packages to the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you execute the script, the ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"If we take a closer look at the order in which the components written above 
are executed, it looks like this:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"When the generated yaml file is closely examined, the following lines are automatically added, so that the necessary packages are installed and the program runs smoothly without errors."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6680],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),s=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=s(e.components);return a.createElement(l.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},_=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,i=e.originalType,l=e.parentName,d=p(e,["components","mdxType","originalType","parentName"]),m=s(t),_=r,u=m["".concat(l,".").concat(_)]||m[_]||c[_]||i;return t?a.createElement(u,o(o({ref:n},d),{},{components:t})):a.createElement(u,o({ref:n},d))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=t.length,o=new Array(i);o[0]=_;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p[m]="string"==typeof e?e:r,o[1]=p;for(var s=2;s{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>o,default:()=>c,frontMatter:()=>i,metadata:()=>p,toc:()=>s});var a=t(7462),r=(t(7294),t(3905));const i={title:"9. Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},o=void 0,p={unversionedId:"kubeflow/advanced-environment",id:"kubeflow/advanced-environment",title:"9. Component - Environment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-environment.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-environment",permalink:"/en/docs/kubeflow/advanced-environment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-environment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:9,frontMatter:{title:"9. 
Component - Environment",description:"",sidebar_position:9,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/kubeflow/advanced-component"},next:{title:"10. Pipeline - Setting",permalink:"/en/docs/kubeflow/advanced-pipeline"}},l={},s=[{value:"Component Environment",id:"component-environment",level:2},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Adding packages",id:"adding-packages",level:2},{value:"1. base_image",id:"1-base_image",level:3},{value:"2. packages_to_install",id:"2-packages_to_install",level:3}],d={toc:s},m="wrapper";function c(e){let{components:n,...t}=e;return(0,r.kt)(m,(0,a.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"component-environment"},"Component Environment"),(0,r.kt)("p",null,"When we run the pipeline written in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component"},"8. Component - InputPath/OutputPath"),", it fails. Let's find out why it fails and modify it so that it can run properly. "),(0,r.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,r.kt)("p",null,"Let's convert the component written ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component#convert-to-kubeflow-format"},"earlier")," into a yaml file."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you run the script above, you will get a ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file like the one below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: model, type: dill}\n- {name: kernel, type: String}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n 
_parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --model\n - {inputPath: model}\n - --kernel\n - {inputValue: kernel}\n')),(0,r.kt)("p",null,"According to the content explained in the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/basic-component#convert-to-kubeflow-format"},"Basic Usage Component")," previously mentioned, this component will be executed as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"However, when running the component created above, an error will occur.",(0,r.kt)("br",{parentName:"p"}),"\n","The reason is in the way the component wrapper is executed.",(0,r.kt)("br",{parentName:"p"}),"\n","Kubeflow uses Kubernetes, so the component wrapper runs the component content on its own separate container."),(0,r.kt)("p",null,"In detail, the image specified in the generated ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"image: python:3.7"),"."),(0,r.kt)("p",null,"There may be some people who notice why it is not running for some reason."),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.7")," image does not have the packages we want to use, such as ",(0,r.kt)("inlineCode",{parentName:"p"},"dill"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"pandas"),", and ",(0,r.kt)("inlineCode",{parentName:"p"},"sklearn"),", installed.",(0,r.kt)("br",{parentName:"p"}),"\n","Therefore, when executing, it fails with an error indicating that the packages are not found."),(0,r.kt)("p",null,"So, how can we add the packages?"),(0,r.kt)("h2",{id:"adding-packages"},"Adding packages"),(0,r.kt)("p",null,"During the process of converting Kubeflow, there are two ways to add packages:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"base_image")),(0,r.kt)("li",{parentName:"ol"},"Using ",(0,r.kt)("inlineCode",{parentName:"li"},"package_to_install"))),(0,r.kt)("p",null,"Let's check what arguments the function ",(0,r.kt)("inlineCode",{parentName:"p"},"create_component_from_func")," used to compile the components can receive."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"def create_component_from_func(\n func: Callable,\n output_component_file: Optional[str] = None,\n base_image: Optional[str] = None,\n packages_to_install: List[str] = None,\n annotations: Optional[Mapping[str, str]] = None,\n):\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"func"),": Function that creates the component wrapper to be made into a component."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"base_image"),": Image that the component wrapper will run on."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"packages_to_install"),": Additional packages that need to be installed for the component to use.")),(0,r.kt)("h3",{id:"1-base_image"},"1. 
base_image"),(0,r.kt)("p",null,"Take a closer look at the sequence in which the component is executed and it will be as follows:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull base_image")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install packages_to_install")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"If the base_image used by the component already has all the packages installed, you can use it without installing additional packages."),(0,r.kt)("p",null,"For example, on this page we are going to write a Dockerfile like this:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-dockerfile"},"FROM python:3.7\n\nRUN pip install dill pandas scikit-learn\n")),(0,r.kt)("p",null,"Let's build the image using the Dockerfile above. The Docker hub we will use for the practice is ghcr.",(0,r.kt)("br",{parentName:"p"}),"\n","You can choose a Docker hub according to your environment and upload it."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image\ndocker push ghcr.io/mlops-for-all/base-image\n")),(0,r.kt)("p",null,"Now let's try inputting the base image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n base_image="ghcr.io/mlops-for-all/base-image:latest",\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you compile the generated component, it will appear as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: ghcr.io/mlops-for-all/base-image:latest\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", 
dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"We can confirm that the base_image has been changed to the value we have set."),(0,r.kt)("h3",{id:"2-packages_to_install"},"2. packages_to_install"),(0,r.kt)("p",null,"However, when packages are added, it takes a lot of time to create a new Docker image.\nIn this case, we can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"packages_to_install")," argument to easily add packages to the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\nif __name__ == "__main__":\n train_from_csv.component_spec.save("train_from_csv.yaml")\n')),(0,r.kt)("p",null,"If you execute the script, the ",(0,r.kt)("inlineCode",{parentName:"p"},"train_from_csv.yaml")," file will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'name: Train from csv\ninputs:\n- {name: train_data, type: csv}\n- {name: train_target, type: csv}\n- {name: kernel, type: String}\noutputs:\n- {name: model, type: dill}\nimplementation:\n container:\n image: python:3.7\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill==0.3.4\' \'pandas==1.3.4\' \'scikit-learn==1.0.1\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill==0.3.4\' \'pandas==1.3.4\'\n \'scikit-learn==1.0.1\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n kernel,\n ):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, 
default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n args:\n - --train-data\n - {inputPath: train_data}\n - --train-target\n - {inputPath: train_target}\n - --kernel\n - {inputValue: kernel}\n - --model\n - {outputPath: model}\n')),(0,r.kt)("p",null,"If we take a closer look at the order in which the components written above are executed, it looks like this:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"docker pull python:3.7")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1")),(0,r.kt)("li",{parentName:"ol"},"run ",(0,r.kt)("inlineCode",{parentName:"li"},"command"))),(0,r.kt)("p",null,"When the generated yaml file is closely examined, the following lines are automatically added, so that the necessary packages are installed and the program runs smoothly without errors."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"}," command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n 'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'\n 'scikit-learn==1.0.1' --user) && \"$0\" \"$@\"\n")))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/8687dcee.2d3a81a1.js b/en/assets/js/8687dcee.ba6fb95e.js similarity index 99% rename from en/assets/js/8687dcee.2d3a81a1.js rename to en/assets/js/8687dcee.ba6fb95e.js index 87703ed8..e0e58edd 100644 --- a/en/assets/js/8687dcee.2d3a81a1.js +++ b/en/assets/js/8687dcee.ba6fb95e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9366],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>m});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,l=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=i,m=u["".concat(s,".").concat(c)]||u[c]||h[c]||l;return t?a.createElement(m,r(r({ref:n},v),{},{components:t})):a.createElement(m,r({ref:n},v))}));function m(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var l=t.length,r=new 
Array(l);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:i,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const l={title:"1. Install Python virtual environment",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"version-1.0/appendix/pyenv",title:"1. Install Python virtual environment",description:"Python virtual environment",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/en/docs/1.0/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/pyenv.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Install Python virtual environment",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/en/docs/1.0/api-deployment/seldon-children"},next:{title:"2. Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/1.0/appendix/metallb"}},s={},p=[{value:"Python virtual environment",id:"python-virtual-environment",level:2},{value:"Installing pyenv",id:"installing-pyenv",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"Installation - macOS",id:"installation---macos",level:3},{value:"Installation - Ubuntu",id:"installation---ubuntu",level:3},{value:"Using pyenv",id:"using-pyenv",level:2},{value:"Install python version",id:"install-python-version",level:3},{value:"Create python virtual environment",id:"create-python-virtual-environment",level:3},{value:"Activating python virtual environment",id:"activating-python-virtual-environment",level:3},{value:"Deactivating python virtual environment",id:"deactivating-python-virtual-environment",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,i.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"python-virtual-environment"},"Python virtual environment"),(0,i.kt)("p",null,"When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects."),(0,i.kt)("p",null,"To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv."),(0,i.kt)("p",null,"Among these, ",(0,i.kt)("em",{parentName:"p"},"MLOps for ALL")," covers the installation of ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv")," and ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),".",(0,i.kt)("br",{parentName:"p"}),"\n","pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments."),(0,i.kt)("h2",{id:"installing-pyenv"},"Installing pyenv"),(0,i.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"Prerequisites vary depending on the operating system. 
Please refer to the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"following page")," and install the required packages accordingly."),(0,i.kt)("h3",{id:"installation---macos"},"Installation - macOS"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv, pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"For macOS, assuming the use of zsh since the default shell has changed to zsh in Catalina version and later, setting up pyenv."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h3",{id:"installation---ubuntu"},"Installation - Ubuntu"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv and pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,i.kt)("p",null,"If the following content is output, it means that the installation is successful."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 
0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\nSkip...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,i.kt)("p",null,"Enter the following string and save it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,i.kt)("p",null,"Restart the shell."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("p",null,"If the following message is displayed, it means that the settings have been configured correctly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee 
`pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h2",{id:"using-pyenv"},"Using pyenv"),(0,i.kt)("h3",{id:"install-python-version"},"Install python version"),(0,i.kt)("p",null,"Using the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv install ")," command, you can install the desired Python version.",(0,i.kt)("br",{parentName:"p"}),"\n","In this page, we will install the Python 3.7.12 version that is used by Kubeflow by default as an example."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,i.kt)("p",null,"If installed normally, the following message will be printed."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,i.kt)("h3",{id:"create-python-virtual-environment"},"Create python virtual environment"),(0,i.kt)("p",null,"Create a Python virtual environment with the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv ")," command to create a Python virtual environment with the desired Python version."),(0,i.kt)("p",null,"For example, let's create a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo")," with Python 3.7.12 version."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,i.kt)("h3",{id:"activating-python-virtual-environment"},"Activating python virtual environment"),(0,i.kt)("p",null,"Use the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv activate ")," command to use the virtual environment created in this way."),(0,i.kt)("p",null,"For example, we will use a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,i.kt)("p",null,"You can see that the information of the current virtual environment is printed at the front of the shell."),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,i.kt)("h3",{id:"deactivating-python-virtual-environment"},"Deactivating python virtual environment"),(0,i.kt)("p",null,"You can deactivate the currently active virtualenv by using the command ",(0,i.kt)("inlineCode",{parentName:"p"},"source deactivate"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9366],{3905:(e,n,t)=>{t.d(n,{Zo:()=>v,kt:()=>m});var a=t(7294);function i(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function l(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(i[t]=e[t]);return i}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(i[t]=e[t])}return i}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},v=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},u="mdxType",h={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,i=e.mdxType,l=e.originalType,s=e.parentName,v=o(e,["components","mdxType","originalType","parentName"]),u=p(t),c=i,m=u["".concat(s,".").concat(c)]||u[c]||h[c]||l;return t?a.createElement(m,r(r({ref:n},v),{},{components:t})):a.createElement(m,r({ref:n},v))}));function m(e,n){var t=arguments,i=n&&n.mdxType;if("string"==typeof e||i){var l=t.length,r=new Array(l);r[0]=c;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o[u]="string"==typeof e?e:i,r[1]=o;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>h,frontMatter:()=>l,metadata:()=>o,toc:()=>p});var a=t(7462),i=(t(7294),t(3905));const l={title:"1. Install Python virtual environment",sidebar_position:1},r=void 0,o={unversionedId:"appendix/pyenv",id:"version-1.0/appendix/pyenv",title:"1. Install Python virtual environment",description:"Python virtual environment",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/appendix/pyenv.md",sourceDirName:"appendix",slug:"/appendix/pyenv",permalink:"/en/docs/1.0/appendix/pyenv",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/appendix/pyenv.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Install Python virtual environment",sidebar_position:1},sidebar:"tutorialSidebar",previous:{title:"6. Multi Models",permalink:"/en/docs/1.0/api-deployment/seldon-children"},next:{title:"2. 
Install load balancer metallb for Bare Metal Cluster",permalink:"/en/docs/1.0/appendix/metallb"}},s={},p=[{value:"Python virtual environment",id:"python-virtual-environment",level:2},{value:"Installing pyenv",id:"installing-pyenv",level:2},{value:"Prerequisites",id:"prerequisites",level:3},{value:"Installation - macOS",id:"installation---macos",level:3},{value:"Installation - Ubuntu",id:"installation---ubuntu",level:3},{value:"Using pyenv",id:"using-pyenv",level:2},{value:"Install python version",id:"install-python-version",level:3},{value:"Create python virtual environment",id:"create-python-virtual-environment",level:3},{value:"Activating python virtual environment",id:"activating-python-virtual-environment",level:3},{value:"Deactivating python virtual environment",id:"deactivating-python-virtual-environment",level:3}],v={toc:p},u="wrapper";function h(e){let{components:n,...t}=e;return(0,i.kt)(u,(0,a.Z)({},v,t,{components:n,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"python-virtual-environment"},"Python virtual environment"),(0,i.kt)("p",null,"When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects."),(0,i.kt)("p",null,"To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv."),(0,i.kt)("p",null,"Among these, ",(0,i.kt)("em",{parentName:"p"},"MLOps for ALL")," covers the installation of ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv"},"pyenv")," and ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv-virtualenv"},"pyenv-virtualenv"),".",(0,i.kt)("br",{parentName:"p"}),"\n","pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments."),(0,i.kt)("h2",{id:"installing-pyenv"},"Installing pyenv"),(0,i.kt)("h3",{id:"prerequisites"},"Prerequisites"),(0,i.kt)("p",null,"Prerequisites vary depending on the operating system. 
Please refer to the ",(0,i.kt)("a",{parentName:"p",href:"https://github.com/pyenv/pyenv/wiki#suggested-build-environment"},"following page")," and install the required packages accordingly."),(0,i.kt)("h3",{id:"installation---macos"},"Installation - macOS"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv, pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"brew update\nbrew install pyenv\nbrew install pyenv-virtualenv\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"For macOS, assuming the use of zsh since the default shell has changed to zsh in Catalina version and later, setting up pyenv."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"echo 'eval \"$(pyenv init -)\"' >> ~/.zshrc\necho 'eval \"$(pyenv virtualenv-init -)\"' >> ~/.zshrc\nsource ~/.zshrc\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv --help\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee `pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h3",{id:"installation---ubuntu"},"Installation - Ubuntu"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},"Install pyenv and pyenv-virtualenv")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl https://pyenv.run | bash\n")),(0,i.kt)("p",null,"If the following content is output, it means that the installation is successful."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"}," % Total % Received % Xferd Average Speed Time Time Time Current\n Dload Upload Total Spent Left Speed\n 0 0 0 0 
0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239\nCloning into '/home/mlops/.pyenv'...\nr\n...\nSkip...\n...\nremote: Enumerating objects: 10, done.\nremote: Counting objects: 100% (10/10), done.\nremote: Compressing objects: 100% (6/6), done.\nremote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0\nUnpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.\n\nWARNING: seems you still have not added 'pyenv' to the load path.\n\n\n# See the README for instructions on how to set up\n# your shell environment for Pyenv.\n\n# Load pyenv-virtualenv automatically by adding\n# the following to ~/.bashrc:\n\neval \"$(pyenv virtualenv-init -)\"\n\n")),(0,i.kt)("ol",{start:2},(0,i.kt)("li",{parentName:"ol"},"Set pyenv")),(0,i.kt)("p",null,"Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo vi ~/.bashrc\n")),(0,i.kt)("p",null,"Enter the following string and save it."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'export PATH="$HOME/.pyenv/bin:$PATH"\neval "$(pyenv init -)"\neval "$(pyenv virtualenv-init -)"\n')),(0,i.kt)("p",null,"Restart the shell."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"exec $SHELL\n")),(0,i.kt)("p",null,"Check if the pyenv command is executed properly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv --help\n")),(0,i.kt)("p",null,"If the following message is displayed, it means that the settings have been configured correctly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv\npyenv 2.2.2\nUsage: pyenv []\n\nSome useful pyenv commands are:\n --version Display the version of pyenv\n activate Activate virtual environment\n commands List all available pyenv commands\n deactivate Deactivate virtual environment\n doctor Verify pyenv installation and development tools to build pythons.\n exec Run an executable with the selected Python version\n global Set or show the global Python version(s)\n help Display help for a command\n hooks List hook scripts for a given pyenv command\n init Configure the shell environment for pyenv\n install Install a Python version using python-build\n local Set or show the local application-specific Python version(s)\n prefix Display prefix for a Python version\n rehash Rehash pyenv shims (run this after installing executables)\n root Display the root directory where versions and shims are kept\n shell Set or show the shell-specific Python version\n shims List existing pyenv shims\n uninstall Uninstall a specific Python version\n version Show the current Python version(s) and its origin\n version-file Detect the file that sets the current pyenv version\n version-name Show the current Python version\n version-origin Explain how the current Python version is set\n versions List all Python versions available to pyenv\n virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin\n virtualenv-delete Uninstall a specific Python virtualenv\n virtualenv-init Configure the shell environment for pyenv-virtualenv\n virtualenv-prefix Display real_prefix for a Python virtualenv version\n virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.\n whence List all Python versions that contain the given executable\n which Display the full path to an executable\n\nSee 
`pyenv help ' for information on a specific command.\nFor full documentation, see: https://github.com/pyenv/pyenv#readme\n")),(0,i.kt)("h2",{id:"using-pyenv"},"Using pyenv"),(0,i.kt)("h3",{id:"install-python-version"},"Install python version"),(0,i.kt)("p",null,"Using the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv install ")," command, you can install the desired Python version.",(0,i.kt)("br",{parentName:"p"}),"\n","In this page, we will install the Python 3.7.12 version that is used by Kubeflow by default as an example."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv install 3.7.12\n")),(0,i.kt)("p",null,"If installed normally, the following message will be printed."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv install 3.7.12\nDownloading Python-3.7.12.tar.xz...\n-> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz\nInstalling Python-3.7.12...\npatching file Doc/library/ctypes.rst\npatching file Lib/test/test_unicode.py\npatching file Modules/_ctypes/_ctypes.c\npatching file Modules/_ctypes/callproc.c\npatching file Modules/_ctypes/ctypes.h\npatching file setup.py\npatching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'\npatching file Modules/_decimal/libmpdec/mpdecimal.h\nInstalled Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12\n")),(0,i.kt)("h3",{id:"create-python-virtual-environment"},"Create python virtual environment"),(0,i.kt)("p",null,"Create a Python virtual environment with the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv virtualenv ")," command to create a Python virtual environment with the desired Python version."),(0,i.kt)("p",null,"For example, let's create a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo")," with Python 3.7.12 version."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv virtualenv 3.7.12 demo\n")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"$ pyenv virtualenv 3.7.12 demo\nLooking in links: /tmp/tmpffqys0gv\nRequirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)\nRequirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)\n")),(0,i.kt)("h3",{id:"activating-python-virtual-environment"},"Activating python virtual environment"),(0,i.kt)("p",null,"Use the ",(0,i.kt)("inlineCode",{parentName:"p"},"pyenv activate ")," command to use the virtual environment created in this way."),(0,i.kt)("p",null,"For example, we will use a Python virtual environment called ",(0,i.kt)("inlineCode",{parentName:"p"},"demo"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv activate demo\n")),(0,i.kt)("p",null,"You can see that the information of the current virtual environment is printed at the front of the shell."),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ pyenv activate demo\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"pyenv-virtualenv: prompt changing will be removed from future release. 
configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.\n(demo) mlops@ubuntu:~$ \n")),(0,i.kt)("h3",{id:"deactivating-python-virtual-environment"},"Deactivating python virtual environment"),(0,i.kt)("p",null,"You can deactivate the currently active virtualenv by using the command ",(0,i.kt)("inlineCode",{parentName:"p"},"source deactivate"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"source deactivate\n")),(0,i.kt)("p",null," Before"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"(demo) mlops@ubuntu:~$ source deactivate\n")),(0,i.kt)("p",null," After"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ \n")))}h.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/89ac38ee.474c210d.js b/en/assets/js/89ac38ee.cfa46176.js similarity index 99% rename from en/assets/js/89ac38ee.474c210d.js rename to en/assets/js/89ac38ee.cfa46176.js index 7aa773b6..d36b51a7 100644 --- a/en/assets/js/89ac38ee.474c210d.js +++ b/en/assets/js/89ac38ee.cfa46176.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9371],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),p=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=p(e.components);return a.createElement(c.Provider,{value:t},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=p(n),u=r,k=s["".concat(c,".").concat(u)]||s[u]||d[u]||o;return n?a.createElement(k,i(i({ref:t},m),{},{components:n})):a.createElement(k,i({ref:t},m))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=u;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[s]="string"==typeof e?e:r,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/images",id:"prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker 
image.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/en/docs/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/images.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/en/docs/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/en/docs/prerequisites/docker/advanced"}},c={},p=[],m={toc:p},s="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(s,(0,a.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 \ubc29\ubc95"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},'docker commit -m "message" -a "author" ')),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit")," \uc744 \uc0ac\uc6a9\ud558\uba74, \uc218\ub3d9\uc73c\ub85c Dockerfile \uc744 \ub9cc\ub4e4\uc9c0 \uc54a\uace0\ub3c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"touch Dockerfile\n")))))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Move to the docker-practice folder.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Create an empty file called Dockerfile.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc774\ubbf8\uc9c0\uc5d0 \ud2b9\uc815 \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ubb34\uc5c7\uc785\ub2c8\uae4c?"))),(0,r.kt)("p",null,"Answer: ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN")),(0,r.kt)("p",null,"Translation: Let's look at the basic commands that can be used in Dockerfile one by one. FROM is a command that specifies which image to use as a base image for Dockerfile. When creating a Docker image, instead of creating the environment I intend from scratch, I can use a pre-made image such as ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", etc. as the base and install pytorch and add my source code."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,r.kt)("p",null,"The command to copy files or directories from the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path on the host (local) to the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... 
\n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,r.kt)("p",null,"ADD is similar to COPY but it has additional features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,r.kt)("p",null,"The command to run the specified command inside a Docker container.\nDocker images maintain the state in which the commands are executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,r.kt)("p",null,"CMD specifies a command that the Docker container will ",(0,r.kt)("strong",{parentName:"p"},"run when it starts"),". There is a similar command called ",(0,r.kt)("strong",{parentName:"p"},"ENTRYPOINT"),". The difference between them will be discussed ",(0,r.kt)("strong",{parentName:"p"},"later"),". Note that only one ",(0,r.kt)("strong",{parentName:"p"},"CMD")," can be run in one Docker image, which is different from ",(0,r.kt)("strong",{parentName:"p"},"RUN")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,r.kt)("p",null,"WORKDIR is a command that specifies which directory inside the container to perform future additional commands. If the directory does not exist, it will be created."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,r.kt)("p",null,"This is a command to set the value of environment variables that will be used continuously inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,r.kt)("p",null,"You can specify the port/protocol to be opened from the container. 
If ",(0,r.kt)("inlineCode",{parentName:"p"},"")," is not specified, TCP is set as the default."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,r.kt)("p",null,"Write a simple Dockerfile by using ",(0,r.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," or an editor like vscode and write the following:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,r.kt)("p",null,"Use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," command to create a Docker Image from a Dockerfile."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,r.kt)("p",null,"Run the following command from the path where the Dockerfile is located."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,r.kt)("p",null,'The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. Let\'s check if the image was built successfully.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,r.kt)("p",null,"If performed normally, it will output as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,r.kt)("p",null,"Let's now ",(0,r.kt)("strong",{parentName:"p"},"run")," a docker container with the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image that we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will result in the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,r.kt)("p",null,"Let's run a docker container and change the value of the ",(0,r.kt)("inlineCode",{parentName:"p"},"TEST")," env var at the time of running the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9371],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=a.createContext({}),p=function(e){var t=a.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=p(e.components);return a.createElement(c.Provider,{value:t},e.children)},s="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),s=p(n),u=r,k=s["".concat(c,".").concat(u)]||s[u]||d[u]||o;return n?a.createElement(k,i(i({ref:t},m),{},{components:n})):a.createElement(k,i({ref:t},m))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=u;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[s]="string"==typeof e?e:r,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/images",id:"prerequisites/docker/images",title:"[Practice] Docker images",description:"Practice to use docker image.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/images.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/images",permalink:"/en/docs/prerequisites/docker/images",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/images.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"[Practice] Docker images",description:"Practice to use docker image.",sidebar_position:5,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker command",permalink:"/en/docs/prerequisites/docker/command"},next:{title:"[Practice] Docker Advanced",permalink:"/en/docs/prerequisites/docker/advanced"}},c={},p=[],m={toc:p},s="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(s,(0,a.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"running container \ub97c docker image \ub85c \ub9cc\ub4dc\ub294 \ubc29\ubc95"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},'docker commit -m "message" -a "author" ')),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"docker commit")," \uc744 \uc0ac\uc6a9\ud558\uba74, \uc218\ub3d9\uc73c\ub85c Dockerfile \uc744 \ub9cc\ub4e4\uc9c0 \uc54a\uace0\ub3c4 \ub3c4\ucee4 \uc774\ubbf8\uc9c0\ub97c \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.",(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre"},"touch Dockerfile\n")))))),(0,r.kt)("ol",{start:3},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Move to the docker-practice folder.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Create an empty 
file called Dockerfile.")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"\uc774\ubbf8\uc9c0\uc5d0 \ud2b9\uc815 \ud328\ud0a4\uc9c0\ub97c \uc124\uce58\ud558\ub294 \uba85\ub839\uc5b4\ub294 \ubb34\uc5c7\uc785\ub2c8\uae4c?"))),(0,r.kt)("p",null,"Answer: ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN")),(0,r.kt)("p",null,"Translation: Let's look at the basic commands that can be used in Dockerfile one by one. FROM is a command that specifies which image to use as a base image for Dockerfile. When creating a Docker image, instead of creating the environment I intend from scratch, I can use a pre-made image such as ",(0,r.kt)("inlineCode",{parentName:"p"},"python:3.9"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"python-3.9-alpine"),", etc. as the base and install pytorch and add my source code."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"FROM [:] [AS ]\n\n# \uc608\uc2dc\nFROM ubuntu\nFROM ubuntu:18.04\nFROM nginx:latest AS ngx\n")),(0,r.kt)("p",null,"The command to copy files or directories from the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path on the host (local) to the ",(0,r.kt)("inlineCode",{parentName:"p"},"")," path inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"COPY ... \n\n# \uc608\uc2dc\nCOPY a.txt /some-directory/b.txt\nCOPY my-directory /some-directory-2\n")),(0,r.kt)("p",null,"ADD is similar to COPY but it has additional features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# 1 - \ud638\uc2a4\ud2b8\uc5d0 \uc555\ucd95\ub418\uc5b4\uc788\ub294 \ud30c\uc77c\uc744 \ud480\uba74\uc11c \ucee8\ud14c\uc774\ub108 \ub0b4\ubd80\ub85c copy \ud560 \uc218 \uc788\uc74c\nADD scripts.tar.gz /tmp\n# 2 - Remote URLs \uc5d0 \uc788\ub294 \ud30c\uc77c\uc744 \uc18c\uc2a4 \uacbd\ub85c\ub85c \uc9c0\uc815\ud560 \uc218 \uc788\uc74c\nADD http://www.example.com/script.sh /tmp\n\n# \uc704 \ub450 \uac00\uc9c0 \uae30\ub2a5\uc744 \uc0ac\uc6a9\ud558\uace0 \uc2f6\uc744 \uacbd\uc6b0\uc5d0\ub9cc COPY \ub300\uc2e0 ADD \ub97c \uc0ac\uc6a9\ud558\ub294 \uac83\uc744 \uad8c\uc7a5\n")),(0,r.kt)("p",null,"The command to run the specified command inside a Docker container.\nDocker images maintain the state in which the commands are executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'RUN \nRUN ["executable-command", "parameter1", "parameter2"]\n\n# \uc608\uc2dc\nRUN pip install torch\nRUN pip install -r requirements.txt\n')),(0,r.kt)("p",null,"CMD specifies a command that the Docker container will ",(0,r.kt)("strong",{parentName:"p"},"run when it starts"),". There is a similar command called ",(0,r.kt)("strong",{parentName:"p"},"ENTRYPOINT"),". The difference between them will be discussed ",(0,r.kt)("strong",{parentName:"p"},"later"),". Note that only one ",(0,r.kt)("strong",{parentName:"p"},"CMD")," can be run in one Docker image, which is different from ",(0,r.kt)("strong",{parentName:"p"},"RUN")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'CMD \nCMD ["executable-command", "parameter1", "parameter2"]\nCMD ["parameter1", "parameter2"] # ENTRYPOINT \uc640 \ud568\uaed8 \uc0ac\uc6a9\ub420 \ub54c\n\n# \uc608\uc2dc\nCMD python main.py\n')),(0,r.kt)("p",null,"WORKDIR is a command that specifies which directory inside the container to perform future additional commands. 
If the directory does not exist, it will be created."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"WORKDIR /path/to/workdir\n\n# \uc608\uc2dc\nWORKDIR /home/demo\nRUN pwd # /home/demo \uac00 \ucd9c\ub825\ub428\n")),(0,r.kt)("p",null,"This is a command to set the value of environment variables that will be used continuously inside the container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"ENV \nENV =\n\n# \uc608\uc2dc\n# default \uc5b8\uc5b4 \uc124\uc815\nRUN locale-gen ko_KR.UTF-8\nENV LANG ko_KR.UTF-8\nENV LANGUAGE ko_KR.UTF-8\nENV LC_ALL ko_KR.UTF-8\n")),(0,r.kt)("p",null,"You can specify the port/protocol to be opened from the container. If ",(0,r.kt)("inlineCode",{parentName:"p"},"")," is not specified, TCP is set as the default."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"EXPOSE \nEXPOSE /\n\n# \uc608\uc2dc\nEXPOSE 8080\n")),(0,r.kt)("p",null,"Write a simple Dockerfile by using ",(0,r.kt)("inlineCode",{parentName:"p"},"vim Dockerfile")," or an editor like vscode and write the following:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# base image \ub97c ubuntu 18.04 \ub85c \uc124\uc815\ud569\ub2c8\ub2e4.\nFROM ubuntu:18.04\n\n# apt-get update \uba85\ub839\uc744 \uc2e4\ud589\ud569\ub2c8\ub2e4.\nRUN apt-get update\n\n# TEST env var\uc758 \uac12\uc744 hello \ub85c \uc9c0\uc815\ud569\ub2c8\ub2e4.\nENV TEST hello\n\n# DOCKER CONTAINER \uac00 \uc2dc\uc791\ub420 \ub54c, \ud658\uacbd\ubcc0\uc218 TEST \uc758 \uac12\uc744 \ucd9c\ub825\ud569\ub2c8\ub2e4.\nCMD echo $TEST\n")),(0,r.kt)("p",null,"Use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," command to create a Docker Image from a Dockerfile."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build --help\n")),(0,r.kt)("p",null,"Run the following command from the path where the Dockerfile is located."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker build -t my-image:v1.0.0 .\n")),(0,r.kt)("p",null,'The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. 
Let\'s check if the image was built successfully.'),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"# grep : my-image \uac00 \uc788\ub294\uc9c0\ub97c \uc7a1\uc544\ub0b4\ub294 (grep) \ud558\ub294 \uba85\ub839\uc5b4\ndocker images | grep my-image\n")),(0,r.kt)("p",null,"If performed normally, it will output as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"my-image v1.0.0 143114710b2d 3 seconds ago 87.9MB\n")),(0,r.kt)("p",null,"Let's now ",(0,r.kt)("strong",{parentName:"p"},"run")," a docker container with the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image that we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will result in the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"hello\n")),(0,r.kt)("p",null,"Let's run a docker container and change the value of the ",(0,r.kt)("inlineCode",{parentName:"p"},"TEST")," env var at the time of running the ",(0,r.kt)("inlineCode",{parentName:"p"},"my-image:v1.0.0")," image we just built."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -e TEST=bye my-image:v1.0.0\n")),(0,r.kt)("p",null,"If performed normally, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"bye\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/8db8515f.1e663163.js b/en/assets/js/8db8515f.76c43018.js similarity index 99% rename from en/assets/js/8db8515f.1e663163.js rename to en/assets/js/8db8515f.76c43018.js index 7888bdab..52bf4913 100644 --- a/en/assets/js/8db8515f.1e663163.js +++ b/en/assets/js/8db8515f.76c43018.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2461],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=o.createContext({}),p=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},s=function(e){var t=p(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),d=p(n),f=a,m=d["".concat(c,".").concat(f)]||d[f]||u[f]||i;return n?o.createElement(m,r(r({ref:t},s),{},{components:n})):o.createElement(m,r({ref:t},s))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,r=new Array(i);r[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var 
o=n(7462),a=(n(7294),n(3905));const i={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/kubeflow-concepts",id:"version-1.0/kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro"},next:{title:"3. Install Requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements"}},c={},p=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:p},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,o.Z)({},s,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"component"},"Component"),(0,a.kt)("p",null,"A component is composed of Component contents and a Component wrapper.\nA single component is delivered to Kubeflow through a Component wrapper and the delivered component executes the defined Component contents and produces artifacts."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-0.png",src:n(4106).Z,width:"1392",height:"704"})),(0,a.kt)("h3",{id:"component-contents"},"Component Contents"),(0,a.kt)("p",null,"There are three components that make up the component contents:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-1.png",src:n(125).Z,width:"574",height:"436"})),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"Environment"),(0,a.kt)("li",{parentName:"ol"},"Python code w/ Config"),(0,a.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,a.kt)("p",null,"Let's explore each component with an example.\nHere is a Python code that loads data, trains an SVC (Support Vector Classifier) model, and saves the SVC model."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,a.kt)("p",null,"The above Python code can be divided into components contents as follows."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-2.png",src:n(9323).Z,width:"832",height:"410"})),(0,a.kt)("p",null,"Environment is the part of the Python code where the packages used in the code are imported.",(0,a.kt)("br",{parentName:"p"}),"\n","Next, Python Code w\\ Config is where the given Config is used to actually perform the training.",(0,a.kt)("br",{parentName:"p"}),"\n","Finally, there is a process to save the 
artifacts. "),(0,a.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,a.kt)("p",null,"Component wrappers deliver the necessary Config and execute tasks for component content."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-3.png",src:n(3671).Z,width:"1066",height:"766"})),(0,a.kt)("p",null,"In Kubeflow, component wrappers are defined as functions, similar to the ",(0,a.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv")," example above.\nWhen a component wrapper wraps the contents, it looks like the following:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-4.png",src:n(5133).Z,width:"464",height:"826"})),(0,a.kt)("h3",{id:"artifacts"},"Artifacts"),(0,a.kt)("p",null,"In the explanation above, it was mentioned that the component creates Artifacts. Artifacts is a term used to refer to any form of a file that is generated, such as evaluation results, logs, etc.\nOf the ones that we are interested in, the following are significant: Models, Data, Metrics, and etc."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-5.png",src:n(9358).Z,width:"1700",height:"454"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model"),(0,a.kt)("li",{parentName:"ul"},"Data"),(0,a.kt)("li",{parentName:"ul"},"Metric"),(0,a.kt)("li",{parentName:"ul"},"etc")),(0,a.kt)("h4",{id:"model"},"Model"),(0,a.kt)("p",null,"We defined the model as follows: "),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.")),(0,a.kt)("h4",{id:"data"},"Data"),(0,a.kt)("p",null,"Data includes preprocessed features, model predictions, etc. "),(0,a.kt)("h4",{id:"metric"},"Metric"),(0,a.kt)("p",null,"Metric is divided into two categories: dynamic metrics and static metrics."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch."),(0,a.kt)("li",{parentName:"ul"},"Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.")),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-6.png",src:n(2196).Z,width:"1696",height:"746"})),(0,a.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,a.kt)("p",null,"As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-7.png",src:n(9160).Z,width:"1810",height:"432"})),(0,a.kt)("h2",{id:"run"},"Run"),(0,a.kt)("p",null,'To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."'),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-8.png",src:n(9135).Z,width:"1810",height:"576"})),(0,a.kt)("p",null,"When a pipeline is executed, each component generates artifacts. 
Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-9.png",src:n(440).Z,width:"1810",height:"592"})),(0,a.kt)("p",null,"Now, let's learn how to write components and pipelines."))}u.isMDXComponent=!0},4106:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},125:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},9323:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},3671:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},5133:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},9358:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},2196:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},9160:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},9135:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},440:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2461],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=o.createContext({}),p=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},s=function(e){var t=p(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),d=p(n),f=a,m=d["".concat(c,".").concat(f)]||d[f]||u[f]||i;return n?o.createElement(m,r(r({ref:t},s),{},{components:n})):o.createElement(m,r({ref:t},s))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,r=new Array(i);r[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const i={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/kubeflow-concepts",id:"version-1.0/kubeflow/kubeflow-concepts",title:"2. 
Kubeflow Concepts",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/en/docs/1.0/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/kubeflow-concepts.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro"},next:{title:"3. Install Requirements",permalink:"/en/docs/1.0/kubeflow/basic-requirements"}},c={},p=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:p},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,o.Z)({},s,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"component"},"Component"),(0,a.kt)("p",null,"A component is composed of Component contents and a Component wrapper.\nA single component is delivered to Kubeflow through a Component wrapper and the delivered component executes the defined Component contents and produces artifacts."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-0.png",src:n(4106).Z,width:"1392",height:"704"})),(0,a.kt)("h3",{id:"component-contents"},"Component Contents"),(0,a.kt)("p",null,"There are three components that make up the component contents:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-1.png",src:n(125).Z,width:"574",height:"436"})),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"Environment"),(0,a.kt)("li",{parentName:"ol"},"Python code w/ Config"),(0,a.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,a.kt)("p",null,"Let's explore each component with an example.\nHere is a Python code that loads data, trains an SVC (Support Vector Classifier) model, and saves the SVC model."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,a.kt)("p",null,"The above Python code can be divided into components contents as follows."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-2.png",src:n(9323).Z,width:"832",height:"410"})),(0,a.kt)("p",null,"Environment is the part of the Python code where the packages used in the code are imported.",(0,a.kt)("br",{parentName:"p"}),"\n","Next, Python Code w\\ Config is where the given Config is used to actually perform the training.",(0,a.kt)("br",{parentName:"p"}),"\n","Finally, there is a process to save the artifacts. 
"),(0,a.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,a.kt)("p",null,"Component wrappers deliver the necessary Config and execute tasks for component content."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-3.png",src:n(3671).Z,width:"1066",height:"766"})),(0,a.kt)("p",null,"In Kubeflow, component wrappers are defined as functions, similar to the ",(0,a.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv")," example above.\nWhen a component wrapper wraps the contents, it looks like the following:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-4.png",src:n(5133).Z,width:"464",height:"826"})),(0,a.kt)("h3",{id:"artifacts"},"Artifacts"),(0,a.kt)("p",null,"In the explanation above, it was mentioned that the component creates Artifacts. Artifacts is a term used to refer to any form of a file that is generated, such as evaluation results, logs, etc.\nOf the ones that we are interested in, the following are significant: Models, Data, Metrics, and etc."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-5.png",src:n(9358).Z,width:"1700",height:"454"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model"),(0,a.kt)("li",{parentName:"ul"},"Data"),(0,a.kt)("li",{parentName:"ul"},"Metric"),(0,a.kt)("li",{parentName:"ul"},"etc")),(0,a.kt)("h4",{id:"model"},"Model"),(0,a.kt)("p",null,"We defined the model as follows: "),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.")),(0,a.kt)("h4",{id:"data"},"Data"),(0,a.kt)("p",null,"Data includes preprocessed features, model predictions, etc. "),(0,a.kt)("h4",{id:"metric"},"Metric"),(0,a.kt)("p",null,"Metric is divided into two categories: dynamic metrics and static metrics."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch."),(0,a.kt)("li",{parentName:"ul"},"Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.")),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-6.png",src:n(2196).Z,width:"1696",height:"746"})),(0,a.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,a.kt)("p",null,"As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-7.png",src:n(9160).Z,width:"1810",height:"432"})),(0,a.kt)("h2",{id:"run"},"Run"),(0,a.kt)("p",null,'To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."'),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-8.png",src:n(9135).Z,width:"1810",height:"576"})),(0,a.kt)("p",null,"When a pipeline is executed, each component generates artifacts. 
Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-9.png",src:n(440).Z,width:"1810",height:"592"})),(0,a.kt)("p",null,"Now, let's learn how to write components and pipelines."))}u.isMDXComponent=!0},4106:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},125:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},9323:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},3671:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},5133:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},9358:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},2196:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},9160:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},9135:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},440:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file diff --git a/en/assets/js/92551a41.85212c92.js b/en/assets/js/92551a41.8c50ec70.js similarity index 99% rename from en/assets/js/92551a41.85212c92.js rename to en/assets/js/92551a41.8c50ec70.js index 4f70fb65..14bb58b2 100644 --- a/en/assets/js/92551a41.85212c92.js +++ b/en/assets/js/92551a41.8c50ec70.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1414],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),s=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=s(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,l=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),d=s(n),m=o,h=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(h,i(i({ref:t},u),{},{components:n})):a.createElement(h,i({ref:t},u))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var p={};for(var l in t)hasOwnProperty.call(t,l)&&(p[l]=t[l]);p.originalType=e,p[d]="string"==typeof e?e:o,i[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>p,toc:()=>s});var a=n(7462),o=(n(7294),n(3905));const r={title:"8. 
Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-component",id:"version-1.0/kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/en/docs/1.0/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/en/docs/1.0/kubeflow/basic-run"},next:{title:"9. Component - Environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment"}},l={},s=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule for using InputPath/OutputPath",id:"rule-for-using-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],u={toc:s},d="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,o.kt)("p",null,"On this page, we will write the code example from ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," as a component."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"Below is the component content used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"With the necessary Configs for the Component Wrapper, it will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"In the ","[Basic Usage Component]","](../kubeflow/basic-component), we 
explained that you should provide type hints for input and output when describing. But what about complex objects such as dataframes, models, that cannot be used in json?"),(0,o.kt)("p",null,"When passing values between functions in Python, objects can be returned and their value will be stored in the host's memory, so the same object can be used in the next function. However, in Kubeflow, components are running independently on each container, that is, they are not sharing the same memory, so you cannot pass objects in the same way as in a normal Python function. The only information that can be passed between components is in ",(0,o.kt)("inlineCode",{parentName:"p"},"json")," format. Therefore, objects of types that cannot be converted into json format such as Model or DataFrame must be passed in some other way."),(0,o.kt)("p",null,"Kubeflow solves this by storing the data in a file instead of memory, and then using the file to pass information. Since the path of the stored file is a string, it can be passed between components. However, in Kubeflow, the user does not know the path of the file before the execution. For this, Kubeflow provides a magic related to the input and output paths, ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath"),"."),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," literally means the input path, and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," literally means the output path."),(0,o.kt)("p",null,"For example, in a component that generates and returns data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()")," is created as an argument. And in a component that receives data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()")," is created as an argument."),(0,o.kt)("p",null,"Once these are created, when connecting them in a pipeline, Kubeflow automatically generates and inputs the necessary paths. Therefore, users no longer need to worry about the paths and only need to consider the relationships between components."),(0,o.kt)("p",null,"Based on this information, when rewriting the component wrapper, it would look like the following."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"InputPath or OutputPath can accept a string. 
This string is the format of the file to be input or output.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it does not necessarily mean that the file has to be stored in this format.",(0,o.kt)("br",{parentName:"p"}),"\n","It just serves as a helper for type checking when compiling the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","If the file format is not fixed, then no input is needed (it serves the role of something like ",(0,o.kt)("inlineCode",{parentName:"p"},"Any")," in type hints)."),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"Convert the written component into a format that can be used in Kubeflow."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"rule-for-using-inputpathoutputpath"},"Rule for using InputPath/OutputPath"),(0,o.kt)("p",null,"There are rules to follow when using InputPath or OutputPath arguments in pipeline."),(0,o.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,o.kt)("p",null,"To execute the previously written component, a component that generates data is created since data is required."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,o.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,o.kt)("p",null,"Now let's write the pipeline."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,o.kt)("p",null,"Have you noticed something strange?",(0,o.kt)("br",{parentName:"p"}),"\n","All the ",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffixes have disappeared from the arguments received in the input and output.",(0,o.kt)("br",{parentName:"p"}),"\n","We can see that instead of accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]'),", we are accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]'),".",(0,o.kt)("br",{parentName:"p"}),"\n","This happens because Kubeflow has a rule that paths created with ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," can be accessed without the 
",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffix when accessed from the pipeline."),(0,o.kt)("p",null,"However, if you upload the pipeline just written, it will not run.",(0,o.kt)("br",{parentName:"p"}),"\n","The reason is explained on the next page."))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1414],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>h});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),s=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=s(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,r=e.originalType,l=e.parentName,u=p(e,["components","mdxType","originalType","parentName"]),d=s(n),m=o,h=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(h,i(i({ref:t},u),{},{components:n})):a.createElement(h,i({ref:t},u))}));function h(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var r=n.length,i=new Array(r);i[0]=m;var p={};for(var l in t)hasOwnProperty.call(t,l)&&(p[l]=t[l]);p.originalType=e,p[d]="string"==typeof e?e:o,i[1]=p;for(var s=2;s{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>c,frontMatter:()=>r,metadata:()=>p,toc:()=>s});var a=n(7462),o=(n(7294),n(3905));const r={title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,p={unversionedId:"kubeflow/advanced-component",id:"version-1.0/kubeflow/advanced-component",title:"8. Component - InputPath/OutputPath",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-component.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-component",permalink:"/en/docs/1.0/kubeflow/advanced-component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:8,frontMatter:{title:"8. Component - InputPath/OutputPath",description:"",sidebar_position:8,contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"7. Pipeline - Run",permalink:"/en/docs/1.0/kubeflow/basic-run"},next:{title:"9. 
Component - Environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment"}},l={},s=[{value:"Complex Outputs",id:"complex-outputs",level:2},{value:"Component Contents",id:"component-contents",level:2},{value:"Component Wrapper",id:"component-wrapper",level:2},{value:"Define a standalone Python function",id:"define-a-standalone-python-function",level:3},{value:"Convert to Kubeflow Format",id:"convert-to-kubeflow-format",level:3},{value:"Rule for using InputPath/OutputPath",id:"rule-for-using-inputpathoutputpath",level:2},{value:"Load Data Component",id:"load-data-component",level:3},{value:"Write Pipeline",id:"write-pipeline",level:3}],u={toc:s},d="wrapper";function c(e){let{components:t,...n}=e;return(0,o.kt)(d,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"complex-outputs"},"Complex Outputs"),(0,o.kt)("p",null,"On this page, we will write the code example from ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts")," as a component."),(0,o.kt)("h2",{id:"component-contents"},"Component Contents"),(0,o.kt)("p",null,"Below is the component content used in ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/kubeflow-concepts#component-contents"},"Kubeflow Concepts"),"."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target = pd.read_csv(train_target_path)\n\nclf = SVC(kernel=kernel)\nclf.fit(train_data, train_target)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"component-wrapper"},"Component Wrapper"),(0,o.kt)("h3",{id:"define-a-standalone-python-function"},"Define a standalone Python function"),(0,o.kt)("p",null,"With the necessary Configs for the Component Wrapper, it will look like this."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'def train_from_csv(\n train_data_path: str,\n train_target_path: str,\n model_path: str,\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"In the ","[Basic Usage Component]","](../kubeflow/basic-component), we explained that you should provide type hints for input and output when describing. But what about complex objects such as dataframes, models, that cannot be used in json?"),(0,o.kt)("p",null,"When passing values between functions in Python, objects can be returned and their value will be stored in the host's memory, so the same object can be used in the next function. However, in Kubeflow, components are running independently on each container, that is, they are not sharing the same memory, so you cannot pass objects in the same way as in a normal Python function. The only information that can be passed between components is in ",(0,o.kt)("inlineCode",{parentName:"p"},"json")," format. Therefore, objects of types that cannot be converted into json format such as Model or DataFrame must be passed in some other way."),(0,o.kt)("p",null,"Kubeflow solves this by storing the data in a file instead of memory, and then using the file to pass information. 
Since the path of the stored file is a string, it can be passed between components. However, in Kubeflow, the user does not know the path of the file before the execution. For this, Kubeflow provides a magic related to the input and output paths, ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath"),"."),(0,o.kt)("p",null,(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," literally means the input path, and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," literally means the output path."),(0,o.kt)("p",null,"For example, in a component that generates and returns data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: OutputPath()")," is created as an argument. And in a component that receives data, ",(0,o.kt)("inlineCode",{parentName:"p"},"data_path: InputPath()")," is created as an argument."),(0,o.kt)("p",null,"Once these are created, when connecting them in a pipeline, Kubeflow automatically generates and inputs the necessary paths. Therefore, users no longer need to worry about the paths and only need to consider the relationships between components."),(0,o.kt)("p",null,"Based on this information, when rewriting the component wrapper, it would look like the following."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath\n\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("p",null,"InputPath or OutputPath can accept a string. 
This string is the format of the file to be input or output.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it does not necessarily mean that the file has to be stored in this format.",(0,o.kt)("br",{parentName:"p"}),"\n","It just serves as a helper for type checking when compiling the pipeline.",(0,o.kt)("br",{parentName:"p"}),"\n","If the file format is not fixed, then no input is needed (it serves the role of something like ",(0,o.kt)("inlineCode",{parentName:"p"},"Any")," in type hints)."),(0,o.kt)("h3",{id:"convert-to-kubeflow-format"},"Convert to Kubeflow Format"),(0,o.kt)("p",null,"Convert the written component into a format that can be used in Kubeflow."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n\n from sklearn.svm import SVC\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,o.kt)("h2",{id:"rule-for-using-inputpathoutputpath"},"Rule for using InputPath/OutputPath"),(0,o.kt)("p",null,"There are rules to follow when using InputPath or OutputPath arguments in pipeline."),(0,o.kt)("h3",{id:"load-data-component"},"Load Data Component"),(0,o.kt)("p",null,"To execute the previously written component, a component that generates data is created since data is required."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@create_component_from_func\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n')),(0,o.kt)("h3",{id:"write-pipeline"},"Write Pipeline"),(0,o.kt)("p",null,"Now let's write the pipeline."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="complex_pipeline")\ndef complex_pipeline(kernel: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n')),(0,o.kt)("p",null,"Have you noticed something strange?",(0,o.kt)("br",{parentName:"p"}),"\n","All the ",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffixes have disappeared from the arguments received in the input and output.",(0,o.kt)("br",{parentName:"p"}),"\n","We can see that instead of accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data_path"]'),", we are accessing ",(0,o.kt)("inlineCode",{parentName:"p"},'iris_data.outputs["data"]'),".",(0,o.kt)("br",{parentName:"p"}),"\n","This happens because Kubeflow has a rule that paths created with ",(0,o.kt)("inlineCode",{parentName:"p"},"InputPath")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"OutputPath")," can be accessed without the 
",(0,o.kt)("inlineCode",{parentName:"p"},"_path")," suffix when accessed from the pipeline."),(0,o.kt)("p",null,"However, if you upload the pipeline just written, it will not run.",(0,o.kt)("br",{parentName:"p"}),"\n","The reason is explained on the next page."))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/98e51aae.9c1e81fb.js b/en/assets/js/98e51aae.ad36baba.js similarity index 99% rename from en/assets/js/98e51aae.9c1e81fb.js rename to en/assets/js/98e51aae.ad36baba.js index c733bba8..0ded0f28 100644 --- a/en/assets/js/98e51aae.9c1e81fb.js +++ b/en/assets/js/98e51aae.ad36baba.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3940],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,a=e.originalType,u=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),p=o(n),k=s,f=p["".concat(u,".").concat(k)]||p[k]||d[k]||a;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var a=n.length,l=new Array(a);l[0]=k;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[p]="string"==typeof e?e:s,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>i,toc:()=>o});var r=n(7462),s=(n(7294),n(3905));const a={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"4.1. 
K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},next:{title:"4.3. Kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:o},p="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,s.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster"),"."),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,s.kt)("p",null,"k3s uses containerd as the backend by default.\nHowever, we need to use docker as the backend to use GPU, so we will install the backend with the ",(0,s.kt)("inlineCode",{parentName:"p"},"--docker")," option."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,s.kt)("p",null,"After installing k3s, check the k3s config."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,s.kt)("p",null,"If installed correctly, the following items will be output. (Security related keys are hidden with <...>.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,s.kt)("p",null,"Set up the Kubernetes cluster by copying the k3s config to be used as the cluster\u2019s kubeconfig."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,s.kt)("p",null,"Grant user access permission to the copied config file."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,s.kt)("h2",{id:"3-setup-kubernetes-client"},"3. 
Setup Kubernetes Client"),(0,s.kt)("p",null,"Now move the kubeconfig configured in the cluster to the local.\nSet the path to ",(0,s.kt)("inlineCode",{parentName:"p"},"~/.kube/config")," on the local."),(0,s.kt)("p",null,"The config file copied at first has the server ip set to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443"),".\nModify this value to match the ip of the cluster.\n(We modified it to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," to match the ip of the cluster used in this page.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. Install Kubernetes Default Modules"),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"helm"),(0,s.kt)("li",{parentName:"ul"},"kustomize"),(0,s.kt)("li",{parentName:"ul"},"CSI plugin"),(0,s.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,s.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,s.kt)("p",null,"Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,s.kt)("p",null,"If you see the following message, it means that the installation was successful."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,s.kt)("h2",{id:"6-references"},"6. 
References"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3940],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>f});var r=n(7294);function s(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(s[n]=e[n]);return s}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(s[n]=e[n])}return s}var u=r.createContext({}),o=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=o(e.components);return r.createElement(u.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,s=e.mdxType,a=e.originalType,u=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),p=o(n),k=s,f=p["".concat(u,".").concat(k)]||p[k]||d[k]||a;return n?r.createElement(f,l(l({ref:t},c),{},{components:n})):r.createElement(f,l({ref:t},c))}));function f(e,t){var n=arguments,s=t&&t.mdxType;if("string"==typeof e||s){var a=n.length,l=new Array(a);l[0]=k;var i={};for(var u in t)hasOwnProperty.call(t,u)&&(i[u]=t[u]);i.originalType=e,i[p]="string"==typeof e?e:s,l[1]=i;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>i,toc:()=>o});var r=n(7462),s=(n(7294),n(3905));const a={title:"4.1. K3s",description:"",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},l=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-k3s",id:"version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",title:"4.1. K3s",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes/kubernetes-with-k3s.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"4.1. K3s",description:"",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",draft:!1,weight:221,contributors:["Jongseob Jeon"],menu:{docs:'parent:../setup-kubernetes"'},images:[]},sidebar:"tutorialSidebar",previous:{title:"3. Install Prerequisite",permalink:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},next:{title:"4.3. 
Kubeadm",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:2},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2},{value:"6. References",id:"6-references",level:2}],c={toc:o},p="wrapper";function d(e){let{components:t,...n}=e;return(0,s.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,s.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster"),"."),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,s.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,s.kt)("p",null,"k3s uses containerd as the backend by default.\nHowever, we need to use docker as the backend to use GPU, so we will install the backend with the ",(0,s.kt)("inlineCode",{parentName:"p"},"--docker")," option."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker\n")),(0,s.kt)("p",null,"After installing k3s, check the k3s config."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo cat /etc/rancher/k3s/k3s.yaml\n")),(0,s.kt)("p",null,"If installed correctly, the following items will be output. (Security related keys are hidden with <...>.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://127.0.0.1:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,s.kt)("p",null,"Set up the Kubernetes cluster by copying the k3s config to be used as the cluster\u2019s kubeconfig."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"mkdir .kube\nsudo cp /etc/rancher/k3s/k3s.yaml .kube/config\n")),(0,s.kt)("p",null,"Grant user access permission to the copied config file."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"sudo chown $USER:$USER .kube/config\n")),(0,s.kt)("h2",{id:"3-setup-kubernetes-client"},"3. 
Setup Kubernetes Client"),(0,s.kt)("p",null,"Now move the kubeconfig configured in the cluster to the local.\nSet the path to ",(0,s.kt)("inlineCode",{parentName:"p"},"~/.kube/config")," on the local."),(0,s.kt)("p",null,"The config file copied at first has the server ip set to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://127.0.0.1:6443"),".\nModify this value to match the ip of the cluster.\n(We modified it to ",(0,s.kt)("inlineCode",{parentName:"p"},"https://192.168.0.19:6443")," to match the ip of the cluster used in this page.)"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n- cluster:\n certificate-authority-data:\n <...>\n server: https://192.168.0.19:6443\n name: default\ncontexts:\n- context:\n cluster: default\n user: default\n name: default\ncurrent-context: default\nkind: Config\npreferences: {}\nusers:\n- name: default\n user:\n client-certificate-data:\n <...>\n client-key-data:\n <...>\n")),(0,s.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. Install Kubernetes Default Modules"),(0,s.kt)("p",null,"Please refer to ",(0,s.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},"helm"),(0,s.kt)("li",{parentName:"ul"},"kustomize"),(0,s.kt)("li",{parentName:"ul"},"CSI plugin"),(0,s.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,s.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,s.kt)("p",null,"Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,s.kt)("p",null,"If you see the following message, it means that the installation was successful."),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 11m v1.21.7+k3s1 192.168.0.19 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")),(0,s.kt)("h2",{id:"6-references"},"6. 
References"),(0,s.kt)("ul",null,(0,s.kt)("li",{parentName:"ul"},(0,s.kt)("a",{parentName:"li",href:"https://rancher.com/docs/k3s/latest/en/installation/install-options/"},"https://rancher.com/docs/k3s/latest/en/installation/install-options/"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/99b17c27.f01f4e76.js b/en/assets/js/99b17c27.f800372e.js similarity index 99% rename from en/assets/js/99b17c27.f01f4e76.js rename to en/assets/js/99b17c27.f800372e.js index 48787f3c..32dd720c 100644 --- a/en/assets/js/99b17c27.f01f4e76.js +++ b/en/assets/js/99b17c27.f800372e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7465],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},u),{},{components:n})):a.createElement(k,l({ref:t},u))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,l[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/command",id:"version-1.0/prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/en/docs/1.0/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/command.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/en/docs/1.0/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/en/docs/1.0/prerequisites/docker/images"}},s={},c=[{value:"1. 
Normal installation confirmation",id:"1-normal-installation-confirmation",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-normal-installation-confirmation"},"1. Normal installation confirmation"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"If installed correctly, you should be able to see the following message."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," If you want to use without sudo, please refer to the following site."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. Docker Pull"),(0,r.kt)("p",null,"Docker pull is a command to download Docker images from a Docker image registry (a repository where Docker images are stored and shared)."),(0,r.kt)("p",null,"You can check the arguments available in docker pull using the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"If performed normally, it prints out as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"It can be seen here that docker pull takes two types of arguments. 
"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"In order to use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," options from help, they must be used before the NAME.\nLet's try and pull the ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," image directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"If interpreted correctly, the command means to pull an image with the tag ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," from an image named ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu"),"."),(0,r.kt)("p",null,"If performed successfully, it will produce an output similar to the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"If you perform the above command, you will download the image called 'ubuntu:18.04' from a registry named ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," to your laptop."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Note that ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"in the future, if you need to get a docker image from a certain ",(0,r.kt)("strong",{parentName:"li"},"private")," registry instead of docker.io or public docker hub, you can use ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," to point to the certain registry, then use ",(0,r.kt)("inlineCode",{parentName:"li"},"docker pull"),". Alternatively, you can set up an ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"insecure registry"),". "),(0,r.kt)("li",{parentName:"ul"},"Also note that ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save"))," and ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," commands are available to store and share docker images in the form of ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," file in an intranet.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"This is the command to list the Docker images that exist locally."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"The arguments available for use in docker images are as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"Let's try executing the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"If you install Docker and proceed with this practice, it will output something similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"If you use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," argument among the possible arguments, only the ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," will be printed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. Docker ps"),(0,r.kt)("p",null,"Command to output the list of currently running Docker containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"Use the following arguments can be used with 'docker ps':"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"Let's try running the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"If there are no currently running containers, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"If there is a container running, it will look similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"Command to run a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"The command to run docker run is as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"What we need to confirm here is that the docker run command takes three types of arguments. "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"Let's try running a docker container ourselves."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it"),": Combination of ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," options",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Runs the container and connects it to an interactive terminal"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name"),": Assigns a name to the container for easier identification instead of using the container ID"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),": Specifies the command to be executed in the container upon startup, where ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," opens a bash shell.")),(0,r.kt)("p",null,"After running the command, you can exit the container by using the ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," command."),(0,r.kt)("p",null,"When you enter the previously learned ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," command, the following output will be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"It was said that the container being executed was coming out, but for some reason the container that was just executed does not appear. The reason is that ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," shows the currently running containers by default. If you want to see the stopped containers too, you must give the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"Then the list of terminated containers will also be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. 
Docker exec"),(0,r.kt)("p",null,"Docker exec is a command that is used to issue commands or access the inside of a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"For example, let's try running the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"Here, the ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," option is a command that allows the Docker container to run in the background so that even if the connection ends to the container, it continues to run."),(0,r.kt)("p",null,"Use ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," to check if it is currently running."),(0,r.kt)("p",null,"It can be confirmed that it is running as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"Now let's connect to the running docker container through the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"This is the same as the previous ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command, allowing you to access the inside of the container."),(0,r.kt)("p",null,"You can exit using ",(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. Docker logs"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"I will have the following container be executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,'By using the above command, we have set up a busybox container named "test" as a Docker container in the background and printed the current time once every second.'),(0,r.kt)("p",null,"Now let's check the log with the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"If performed normally, it will be similar to below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"However, if used this way, you can only check the logs taken so far.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, you can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," option to keep watching and outputting."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"Command to stop a running Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,"Through ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", you can check the containers currently running, as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"Now let's try to stop Docker with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"After executing, type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," again."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"Comparing with the above result, you can see that the demo2 container has disappeared from the list of currently running containers.\nThe rest of the containers will also be stopped."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("p",null,"Docker rm: Command to delete a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"Docker containers are in a stopped state by default. That's why you can see stopped containers using ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),".\nBut why do we have to delete the stopped containers?",(0,r.kt)("br",{parentName:"p"}),"\n","Even when stopped, the data used in the Docker remains in the container.\nSo you can restart the container through restarting. 
But this process will use disk.\nSo\nin order to delete the containers that are not used at all, we should use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," command."),(0,r.kt)("p",null," First, let's check the current containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"There are three containers as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"Let's try to delete the 'demo3' container through the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,"The command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," reduced it to two lines as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"Delete the remaining containers as well."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. Docker rmi"),(0,r.kt)("p",null,"Command to delete a Docker image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"Use the following commands to check which images are currently on the local."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"The following is output."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"I will try to delete the ",(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"If you type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," again, the following will appear."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7465],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var 
n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},u=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||d[m]||o;return n?a.createElement(k,l(l({ref:t},u),{},{components:n})):a.createElement(k,l({ref:t},u))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,l=new Array(o);l[0]=m;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[p]="string"==typeof e?e:r,l[1]=i;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},l=void 0,i={unversionedId:"prerequisites/docker/command",id:"version-1.0/prerequisites/docker/command",title:"[Practice] Docker command",description:"Practice to use docker command.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/command.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/command",permalink:"/en/docs/1.0/prerequisites/docker/command",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/command.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"[Practice] Docker command",description:"Practice to use docker command.",sidebar_position:4,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"What is Docker?",permalink:"/en/docs/1.0/prerequisites/docker/"},next:{title:"[Practice] Docker images",permalink:"/en/docs/1.0/prerequisites/docker/images"}},s={},c=[{value:"1. Normal installation confirmation",id:"1-normal-installation-confirmation",level:2},{value:"2. Docker Pull",id:"2-docker-pull",level:2},{value:"3. Docker images",id:"3-docker-images",level:2},{value:"4. Docker ps",id:"4-docker-ps",level:2},{value:"5. Docker run",id:"5-docker-run",level:2},{value:"6. Docker exec",id:"6-docker-exec",level:2},{value:"7. Docker logs",id:"7-docker-logs",level:2},{value:"8. Docker stop",id:"8-docker-stop",level:2},{value:"10. Docker rmi",id:"10-docker-rmi",level:2},{value:"References",id:"references",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...n}=e;return(0,r.kt)(p,(0,a.Z)({},u,n,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"1-normal-installation-confirmation"},"1. 
Normal installation confirmation"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",null,"If installed correctly, you should be able to see the following message."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Hello from Docker!\nThis message shows that your installation appears to be working correctly.\n....\n")),(0,r.kt)("p",null,(0,r.kt)("strong",{parentName:"p"},"(For ubuntu)")," If you want to use without sudo, please refer to the following site."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"},"https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user"))),(0,r.kt)("h2",{id:"2-docker-pull"},"2. Docker Pull"),(0,r.kt)("p",null,"Docker pull is a command to download Docker images from a Docker image registry (a repository where Docker images are stored and shared)."),(0,r.kt)("p",null,"You can check the arguments available in docker pull using the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull --help\n")),(0,r.kt)("p",null,"If performed normally, it prints out as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker pull [OPTIONS] NAME[:TAG|@DIGEST]\n\nPull an image or a repository from a registry\n\nOptions:\n -a, --all-tags Download all tagged images in the repository\n --disable-content-trust Skip image verification (default true)\n --platform string Set platform if server is multi-platform capable\n -q, --quiet Suppress verbose output\n")),(0,r.kt)("p",null,"It can be seen here that docker pull takes two types of arguments. 
"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"NAME[:TAG|@DIGEST]"))),(0,r.kt)("p",null,"In order to use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," options from help, they must be used before the NAME.\nLet's try and pull the ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu:18.04")," image directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker pull ubuntu:18.04\n")),(0,r.kt)("p",null,"If interpreted correctly, the command means to pull an image with the tag ",(0,r.kt)("inlineCode",{parentName:"p"},"18.04")," from an image named ",(0,r.kt)("inlineCode",{parentName:"p"},"ubuntu"),"."),(0,r.kt)("p",null,"If performed successfully, it will produce an output similar to the following."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"18.04: Pulling from library/ubuntu\n20d796c36622: Pull complete \nDigest: sha256:42cd9143b6060261187a72716906187294b8b66653b50d70bc7a90ccade5c984\nStatus: Downloaded newer image for ubuntu:18.04\ndocker.io/library/ubuntu:18.04\n")),(0,r.kt)("p",null,"If you perform the above command, you will download the image called 'ubuntu:18.04' from a registry named ",(0,r.kt)("a",{parentName:"p",href:"http://docker.io/library/"},"docker.io/library")," to your laptop."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Note that ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"in the future, if you need to get a docker image from a certain ",(0,r.kt)("strong",{parentName:"li"},"private")," registry instead of docker.io or public docker hub, you can use ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/login/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker login"))," to point to the certain registry, then use ",(0,r.kt)("inlineCode",{parentName:"li"},"docker pull"),". Alternatively, you can set up an ",(0,r.kt)("a",{parentName:"li",href:"https://stackoverflow.com/questions/42211380/add-insecure-registry-to-docker"},"insecure registry"),". "),(0,r.kt)("li",{parentName:"ul"},"Also note that ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/save/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker save"))," and ",(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/load/"},(0,r.kt)("inlineCode",{parentName:"a"},"docker load"))," commands are available to store and share docker images in the form of ",(0,r.kt)("inlineCode",{parentName:"li"},".tar")," file in an intranet.")))),(0,r.kt)("h2",{id:"3-docker-images"},"3. 
Docker images"),(0,r.kt)("p",null,"This is the command to list the Docker images that exist locally."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images --help\n")),(0,r.kt)("p",null,"The arguments available for use in docker images are as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker images [OPTIONS] [REPOSITORY[:TAG]]\n\nList images\n\nOptions:\n -a, --all Show all images (default hides intermediate images)\n --digests Show digests\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print images using a Go template\n --no-trunc Don't truncate output\n -q, --quiet Only show image IDs\n")),(0,r.kt)("p",null,"Let's try executing the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"If you install Docker and proceed with this practice, it will output something similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"If you use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-q")," argument among the possible arguments, only the ",(0,r.kt)("inlineCode",{parentName:"p"},"IMAGE ID")," will be printed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images -q\n")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"29e70752d7b2\n")),(0,r.kt)("h2",{id:"4-docker-ps"},"4. Docker ps"),(0,r.kt)("p",null,"Command to output the list of currently running Docker containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps --help\n")),(0,r.kt)("p",null,"Use the following arguments can be used with 'docker ps':"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker ps [OPTIONS]\n\nList containers\n\nOptions:\n -a, --all Show all containers (default shows just running)\n -f, --filter filter Filter output based on conditions provided\n --format string Pretty-print containers using a Go template\n -n, --last int Show n last created containers (includes all states) (default -1)\n -l, --latest Show the latest created container (includes all states)\n --no-trunc Don't truncate output\n -q, --quiet Only display container IDs\n -s, --size Display total file sizes\n")),(0,r.kt)("p",null,"Let's try running the command below directly."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps\n")),(0,r.kt)("p",null,"If there are no currently running containers, it will be as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"If there is a container running, it will look similar to this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nc1e8f5e89d8d ubuntu "sleep 3600" 13 seconds ago Up 12 seconds trusting_newton\n')),(0,r.kt)("h2",{id:"5-docker-run"},"5. 
Docker run"),(0,r.kt)("p",null,"Command to run a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --help\n")),(0,r.kt)("p",null,"The command to run docker run is as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\n\nRun a command in a new container\n")),(0,r.kt)("p",null,"What we need to confirm here is that the docker run command takes three types of arguments. "),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[OPTIONS]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[COMMAND]")),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("inlineCode",{parentName:"li"},"[ARG...]"))),(0,r.kt)("p",null,"Let's try running a docker container ourselves."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"## Usage: docker run [OPTIONS] IMAGE [COMMAND] [ARG...]\ndocker run -it --name demo1 ubuntu:18.04 /bin/bash\n")),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"-it"),": Combination of ",(0,r.kt)("inlineCode",{parentName:"li"},"-i")," and ",(0,r.kt)("inlineCode",{parentName:"li"},"-t")," options",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},"Runs the container and connects it to an interactive terminal"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"--name"),": Assigns a name to the container for easier identification instead of using the container ID"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash"),": Specifies the command to be executed in the container upon startup, where ",(0,r.kt)("inlineCode",{parentName:"li"},"/bin/bash")," opens a bash shell.")),(0,r.kt)("p",null,"After running the command, you can exit the container by using the ",(0,r.kt)("inlineCode",{parentName:"p"},"exit")," command."),(0,r.kt)("p",null,"When you enter the previously learned ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," command, the following output will be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n")),(0,r.kt)("p",null,"It was said that the container being executed was coming out, but for some reason the container that was just executed does not appear. The reason is that ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," shows the currently running containers by default. If you want to see the stopped containers too, you must give the ",(0,r.kt)("inlineCode",{parentName:"p"},"-a")," option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"Then the list of terminated containers will also be displayed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 2 minutes ago Exited (0) 2 minutes ago demo1\n')),(0,r.kt)("h2",{id:"6-docker-exec"},"6. 
Docker exec"),(0,r.kt)("p",null,"Docker exec is a command that is used to issue commands or access the inside of a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec --help\n")),(0,r.kt)("p",null,"For example, let's try running the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d --name demo2 ubuntu:18.04 sleep 3600\n")),(0,r.kt)("p",null,"Here, the ",(0,r.kt)("inlineCode",{parentName:"p"},"-d")," option is a command that allows the Docker container to run in the background so that even if the connection ends to the container, it continues to run."),(0,r.kt)("p",null,"Use ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," to check if it is currently running."),(0,r.kt)("p",null,"It can be confirmed that it is running as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 seconds ago Up 3 seconds demo2\n')),(0,r.kt)("p",null,"Now let's connect to the running docker container through the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker exec")," command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker exec -it demo2 /bin/bash\n")),(0,r.kt)("p",null,"This is the same as the previous ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command, allowing you to access the inside of the container."),(0,r.kt)("p",null,"You can exit using ",(0,r.kt)("inlineCode",{parentName:"p"},"exit"),"."),(0,r.kt)("h2",{id:"7-docker-logs"},"7. Docker logs"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs --help\n")),(0,r.kt)("p",null,"I will have the following container be executed."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'docker run --name demo3 -d busybox sh -c "while true; do $(echo date); sleep 1; done"\n')),(0,r.kt)("p",null,'By using the above command, we have set up a busybox container named "test" as a Docker container in the background and printed the current time once every second.'),(0,r.kt)("p",null,"Now let's check the log with the command below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3\n")),(0,r.kt)("p",null,"If performed normally, it will be similar to below."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"Sun Mar 6 11:06:49 UTC 2022\nSun Mar 6 11:06:50 UTC 2022\nSun Mar 6 11:06:51 UTC 2022\nSun Mar 6 11:06:52 UTC 2022\nSun Mar 6 11:06:53 UTC 2022\nSun Mar 6 11:06:54 UTC 2022\n")),(0,r.kt)("p",null,"However, if used this way, you can only check the logs taken so far.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, you can use the ",(0,r.kt)("inlineCode",{parentName:"p"},"-f")," option to keep watching and outputting."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker logs demo3 -f \n")),(0,r.kt)("h2",{id:"8-docker-stop"},"8. 
Docker stop"),(0,r.kt)("p",null,"Command to stop a running Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop --help\n")),(0,r.kt)("p",null,"Through ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", you can check the containers currently running, as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" About a minute ago Up About a minute demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 4 minutes ago Up 4 minutes demo2\n')),(0,r.kt)("p",null,"Now let's try to stop Docker with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker stop"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo2\n")),(0,r.kt)("p",null,"After executing, type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps")," again."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 2 minutes ago Up 2 minutes demo3\n')),(0,r.kt)("p",null,"Comparing with the above result, you can see that the demo2 container has disappeared from the list of currently running containers.\nThe rest of the containers will also be stopped."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker stop demo3\n")),(0,r.kt)("p",null,"Docker rm: Command to delete a Docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm --help\n")),(0,r.kt)("p",null,"Docker containers are in a stopped state by default. That's why you can see stopped containers using ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a"),".\nBut why do we have to delete the stopped containers?",(0,r.kt)("br",{parentName:"p"}),"\n","Even when stopped, the data used in the Docker remains in the container.\nSo you can restart the container through restarting. 
But this process will use disk.\nSo\nin order to delete the containers that are not used at all, we should use the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker rm")," command."),(0,r.kt)("p",null," First, let's check the current containers."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker ps -a\n")),(0,r.kt)("p",null,"There are three containers as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n730391669c39 busybox "sh -c \'while true; \u2026" 4 minutes ago Exited (137) About a minute ago demo3\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1\n')),(0,r.kt)("p",null,"Let's try to delete the 'demo3' container through the following command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo3\n")),(0,r.kt)("p",null,"The command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps -a")," reduced it to two lines as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\nfc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2\n4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1\n')),(0,r.kt)("p",null,"Delete the remaining containers as well."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rm demo2\ndocker rm demo1\n")),(0,r.kt)("h2",{id:"10-docker-rmi"},"10. Docker rmi"),(0,r.kt)("p",null,"Command to delete a Docker image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi --help\n")),(0,r.kt)("p",null,"Use the following commands to check which images are currently on the local."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker images\n")),(0,r.kt)("p",null,"The following is output."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nbusybox latest a8440bba1bc0 32 hours ago 1.41MB\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("p",null,"I will try to delete the ",(0,r.kt)("inlineCode",{parentName:"p"},"busybox")," image."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker rmi busybox\n")),(0,r.kt)("p",null,"If you type ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," again, the following will appear."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"REPOSITORY TAG IMAGE ID CREATED SIZE\nubuntu 18.04 29e70752d7b2 2 days ago 56.7MB\n")),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"},"https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/9a3eef67.c5e96caf.js b/en/assets/js/9a3eef67.2c2a8de4.js similarity index 98% rename from en/assets/js/9a3eef67.c5e96caf.js rename to en/assets/js/9a3eef67.2c2a8de4.js index 565358e3..b0b0442d 100644 --- a/en/assets/js/9a3eef67.c5e96caf.js +++ b/en/assets/js/9a3eef67.2c2a8de4.js 
@@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9512],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),c=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=c(n),k=o,m=d["".concat(l,".").concat(k)]||d[k]||p[k]||a;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[d]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/introduction",id:"prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/en/docs/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/introduction.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/en/docs/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/en/docs/prerequisites/docker/"}},l={},c=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"Docker & Kubernetes",id:"docker--kubernetes",level:2},{value:"Not a software but a product",id:"not-a-software-but--a-product",level:3},{value:"Docker",id:"docker",level:4},{value:"Kubernetes",id:"kubernetes",level:4},{value:"History of Open source",id:"history-of-open-source",level:3},{value:"Initial Docker & Kubernetes",id:"initial-docker--kubernetes",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"Current docker & 
kubernetes",id:"current-docker--kubernetes",level:4},{value:"References",id:"references",level:3}],u={toc:c},d="wrapper";function p(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,o.kt)("p",null,"To operationalize machine learning models, additional functionalities beyond model development are required."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Training Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Schedule management for model training commands"),(0,o.kt)("li",{parentName:"ul"},"Ensuring reproducibility of trained models"))),(0,o.kt)("li",{parentName:"ol"},"Deployment Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Traffic distribution"),(0,o.kt)("li",{parentName:"ul"},"Monitoring service failures"),(0,o.kt)("li",{parentName:"ul"},"Troubleshooting in case of failures")))),(0,o.kt)("p",null,"Fortunately, the software development field has already put a lot of thought and effort into addressing these needs. Therefore, when deploying machine learning models, leveraging the outcomes of these considerations can be highly beneficial. Docker and Kubernetes are two prominent software products widely used in MLOps to address these needs."),(0,o.kt)("h2",{id:"docker--kubernetes"},"Docker & Kubernetes"),(0,o.kt)("h3",{id:"not-a-software-but--a-product"},"Not a software but a product"),(0,o.kt)("p",null,"Docker and Kubernetes are representative software (products) that provide containerization and container orchestration functions respectively."),(0,o.kt)("h4",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker was the mainstream in the past, but its usage has been decreasing gradually with the addition of various paid policy.",(0,o.kt)("br",{parentName:"p"}),"\n","However, as of March 2022, it is still the most commonly used container virtualization software."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2019.png",src:n(3293).Z,width:"1600",height:"900"})),(0,o.kt)("center",null," [from sysdig 2019] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2021.png",src:n(9114).Z,width:"750",height:"437"})),(0,o.kt)("center",null," [from sysdig 2021] "),(0,o.kt)("h4",{id:"kubernetes"},"Kubernetes"),(0,o.kt)("p",null,"Kubernetes: Kubernetes is a product that has almost no comparison so far."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cncf-survey.png",src:n(794).Z,width:"2048",height:"1317"})),(0,o.kt)("center",null," [from cncf survey] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"t4-ai.png",src:n(9686).Z,width:"926",height:"629"})),(0,o.kt)("center",null," [from t4.ai] "),(0,o.kt)("h3",{id:"history-of-open-source"},"History of Open source"),(0,o.kt)("h4",{id:"initial-docker--kubernetes"},"Initial Docker & Kubernetes"),(0,o.kt)("p",null,"At the beginning of Docker development, ",(0,o.kt)("strong",{parentName:"p"},"one package")," called Docker Engine contained multiple features such as API, CLI, networking, storage, etc., but it began to be ",(0,o.kt)("strong",{parentName:"p"},"divided one by one")," according to the philosophy of ",(0,o.kt)("strong",{parentName:"p"},"MSA"),".",(0,o.kt)("br",{parentName:"p"}),"\n","However, the initial Kubernetes included Docker Engine for container virtualization.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, whenever the Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected."),(0,o.kt)("h4",{id:"open-container-initiative"},"Open Container 
Initiative"),(0,o.kt)("p",null,"In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.",(0,o.kt)("br",{parentName:"p"}),"\n","Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of Containerd."),(0,o.kt)("p",null,"In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5. "),(0,o.kt)("h4",{id:"cri-o"},"CRI-O"),(0,o.kt)("p",null,"CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes."),(0,o.kt)("h4",{id:"current-docker--kubernetes"},"Current docker & kubernetes"),(0,o.kt)("p",null,"Currently, Docker and Kubernetes have been using Docker Engine as the default container runtime, but since Docker's API did not match the CRI specification (",(0,o.kt)("em",{parentName:"p"},"OCI follows"),"), Kubernetes developed and supported a ",(0,o.kt)("strong",{parentName:"p"},"dockershim")," to make Docker's API compatible with CRI, (",(0,o.kt)("em",{parentName:"p"},"it was a huge burden for Kubernetes, not for Docker"),"). This was ",(0,o.kt)("strong",{parentName:"p"},"deprecated from Kubernetes v1.20 and abandoned from v1.23"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"v1.23 will be released in December 2021")),(0,o.kt)("p",null,"So from Kubernetes v1.23, you can no longer use Docker natively.\nHowever, ",(0,o.kt)("strong",{parentName:"p"},"users are not much affected by this change")," because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of what container runtime Kubernetes is made of."),(0,o.kt)("h3",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,o.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}p.isMDXComponent=!0},794:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},3293:(e,t,n)=>{n.d(t,{Z:()=>r});const 
r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},9114:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},9686:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9512],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),c=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=c(e.components);return r.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},k=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=c(n),k=o,m=d["".concat(l,".").concat(k)]||d[k]||p[k]||a;return n?r.createElement(m,i(i({ref:t},u),{},{components:n})):r.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=k;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[d]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const a={title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/introduction",id:"prerequisites/docker/introduction",title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/introduction.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/introduction",permalink:"/en/docs/prerequisites/docker/introduction",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/introduction.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"Why Docker & Kubernetes ?",description:"Introduction to Docker.",sidebar_position:2,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Install Docker",permalink:"/en/docs/prerequisites/docker/install"},next:{title:"What is Docker?",permalink:"/en/docs/prerequisites/docker/"}},l={},c=[{value:"Why Kubernetes ?",id:"why-kubernetes-",level:2},{value:"Docker & Kubernetes",id:"docker--kubernetes",level:2},{value:"Not a software but a product",id:"not-a-software-but--a-product",level:3},{value:"Docker",id:"docker",level:4},{value:"Kubernetes",id:"kubernetes",level:4},{value:"History of Open source",id:"history-of-open-source",level:3},{value:"Initial Docker & 
Kubernetes",id:"initial-docker--kubernetes",level:4},{value:"Open Container Initiative",id:"open-container-initiative",level:4},{value:"CRI-O",id:"cri-o",level:4},{value:"Current docker & kubernetes",id:"current-docker--kubernetes",level:4},{value:"References",id:"references",level:3}],u={toc:c},d="wrapper";function p(e){let{components:t,...a}=e;return(0,o.kt)(d,(0,r.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"why-kubernetes-"},"Why Kubernetes ?"),(0,o.kt)("p",null,"To operationalize machine learning models, additional functionalities beyond model development are required."),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Training Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Schedule management for model training commands"),(0,o.kt)("li",{parentName:"ul"},"Ensuring reproducibility of trained models"))),(0,o.kt)("li",{parentName:"ol"},"Deployment Phase",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"Traffic distribution"),(0,o.kt)("li",{parentName:"ul"},"Monitoring service failures"),(0,o.kt)("li",{parentName:"ul"},"Troubleshooting in case of failures")))),(0,o.kt)("p",null,"Fortunately, the software development field has already put a lot of thought and effort into addressing these needs. Therefore, when deploying machine learning models, leveraging the outcomes of these considerations can be highly beneficial. Docker and Kubernetes are two prominent software products widely used in MLOps to address these needs."),(0,o.kt)("h2",{id:"docker--kubernetes"},"Docker & Kubernetes"),(0,o.kt)("h3",{id:"not-a-software-but--a-product"},"Not a software but a product"),(0,o.kt)("p",null,"Docker and Kubernetes are representative software (products) that provide containerization and container orchestration functions respectively."),(0,o.kt)("h4",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker was the mainstream in the past, but its usage has been decreasing gradually with the addition of various paid policy.",(0,o.kt)("br",{parentName:"p"}),"\n","However, as of March 2022, it is still the most commonly used container virtualization software."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2019.png",src:n(3293).Z,width:"1600",height:"900"})),(0,o.kt)("center",null," [from sysdig 2019] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"sysdig-2021.png",src:n(9114).Z,width:"750",height:"437"})),(0,o.kt)("center",null," [from sysdig 2021] "),(0,o.kt)("h4",{id:"kubernetes"},"Kubernetes"),(0,o.kt)("p",null,"Kubernetes: Kubernetes is a product that has almost no comparison so far."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cncf-survey.png",src:n(794).Z,width:"2048",height:"1317"})),(0,o.kt)("center",null," [from cncf survey] "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"t4-ai.png",src:n(9686).Z,width:"926",height:"629"})),(0,o.kt)("center",null," [from t4.ai] "),(0,o.kt)("h3",{id:"history-of-open-source"},"History of Open source"),(0,o.kt)("h4",{id:"initial-docker--kubernetes"},"Initial Docker & Kubernetes"),(0,o.kt)("p",null,"At the beginning of Docker development, ",(0,o.kt)("strong",{parentName:"p"},"one package")," called Docker Engine contained multiple features such as API, CLI, networking, storage, etc., but it began to be ",(0,o.kt)("strong",{parentName:"p"},"divided one by one")," according to the philosophy of ",(0,o.kt)("strong",{parentName:"p"},"MSA"),".",(0,o.kt)("br",{parentName:"p"}),"\n","However, the initial Kubernetes included Docker Engine for container virtualization.",(0,o.kt)("br",{parentName:"p"}),"\n","Therefore, whenever the 
Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected."),(0,o.kt)("h4",{id:"open-container-initiative"},"Open Container Initiative"),(0,o.kt)("p",null,"In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.",(0,o.kt)("br",{parentName:"p"}),"\n","Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of Containerd."),(0,o.kt)("p",null,"In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5. "),(0,o.kt)("h4",{id:"cri-o"},"CRI-O"),(0,o.kt)("p",null,"CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes."),(0,o.kt)("h4",{id:"current-docker--kubernetes"},"Current docker & kubernetes"),(0,o.kt)("p",null,"Currently, Docker and Kubernetes have been using Docker Engine as the default container runtime, but since Docker's API did not match the CRI specification (",(0,o.kt)("em",{parentName:"p"},"OCI follows"),"), Kubernetes developed and supported a ",(0,o.kt)("strong",{parentName:"p"},"dockershim")," to make Docker's API compatible with CRI, (",(0,o.kt)("em",{parentName:"p"},"it was a huge burden for Kubernetes, not for Docker"),"). This was ",(0,o.kt)("strong",{parentName:"p"},"deprecated from Kubernetes v1.20 and abandoned from v1.23"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"v1.23 will be released in December 2021")),(0,o.kt)("p",null,"So from Kubernetes v1.23, you can no longer use Docker natively.\nHowever, ",(0,o.kt)("strong",{parentName:"p"},"users are not much affected by this change")," because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of what container runtime Kubernetes is made 
of."),(0,o.kt)("h3",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://www.linkedin.com/pulse/containerd%EB%8A%94-%EB%AC%B4%EC%97%87%EC%9D%B4%EA%B3%A0-%EC%99%9C-%EC%A4%91%EC%9A%94%ED%95%A0%EA%B9%8C-sean-lee/?originalSubdomain=kr"},(0,o.kt)("em",{parentName:"a"},"https://www.linkedin.com/pulse/containerd\ub294-\ubb34\uc5c7\uc774\uace0-\uc65c-\uc911\uc694\ud560\uae4c-sean-lee/?originalSubdomain=kr"))),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/"},"https://kubernetes.io/blog/2021/12/07/kubernetes-1-23-release-announcement/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dockershim-faq/"},"https://kubernetes.io/blog/2020/12/02/dockershim-faq/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/blog/2020/12/02/dont-panic-kubernetes-and-docker/")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"},"https://kubernetes.io/ko/blog/2020/12/02/dont-panic-kubernetes-and-docker/"))))}p.isMDXComponent=!0},794:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/cncf-survey-53378aeae96c2069d60cbd72e31baa22.png"},3293:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2019-a7a9178a83773e8126833287a7fb755c.png"},9114:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/sysdig-2021-d575835a018c7b99ef06c932a46953a3.png"},9686:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/t4-ai-f055bc33fd1f8fd7b098b71508aac896.png"}}]); \ No newline at end of file diff --git a/en/assets/js/9bd4ad20.b7b68fbf.js b/en/assets/js/9bd4ad20.6f1994c0.js similarity index 99% rename from en/assets/js/9bd4ad20.b7b68fbf.js rename to en/assets/js/9bd4ad20.6f1994c0.js index 080deb97..13de8255 100644 --- a/en/assets/js/9bd4ad20.b7b68fbf.js +++ b/en/assets/js/9bd4ad20.6f1994c0.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4447],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),m=p(a),d=l,k=m["".concat(s,".").concat(d)]||m[d]||c[d]||r;return a?n.createElement(k,o(o({ref:t},u),{},{components:a})):n.createElement(k,o({ref:t},u))}));function k(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,o=new 
Array(r);o[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:l,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"version-1.0/setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],u={toc:p},m="wrapper";function c(e){let{components:t,...a}=e;return(0,l.kt)(m,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,l.kt)("p",null,"On this page, we will explain how to install the modules that will be used on the cluster from the client nodes.",(0,l.kt)("br",{parentName:"p"}),"\n","All the processes introduced here will be done on the ",(0,l.kt)("strong",{parentName:"p"},"client nodes"),"."),(0,l.kt)("h2",{id:"helm"},"Helm"),(0,l.kt)("p",null,"Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download Helm version 3.7.1 into the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS refer to the ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"official website")," for the download path of the binary that matches the OS and CPU of your client node."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip the file to use helm and move the file to its desired location."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf 
helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check to see if the installation was successful:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally. "),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,l.kt)("p",{parentName:"li"},"Environment variables:"),(0,l.kt)("table",{parentName:"li"},(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,l.kt)("p",{parentName:"li"},"..."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre"},"")))),(0,l.kt)("h2",{id:"kustomize"},"Kustomize"),(0,l.kt)("p",null,"Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download the binary version of kustomize v3.10.0 in the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS can be downloaded from ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0")," after checking."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip to use kustomize, and change the file location. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check if it is installed correctly."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,l.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"The CSI Plugin is a module that is responsible for storage within Kubernetes. Install the CSI Plugin, Local Path Provisioner, which is easy to use in single node clusters."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following messages, it means that the installation was successful: "),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")))),(0,l.kt)("p",null,"If successful, it will display the following output:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n")),(0,l.kt)("ol",{start:4},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Execute the following command to change the default storage class:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,l.kt)("p",{parentName:"li"},"If the command is successful, the following output will be displayed:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Verify 
that the default storage class has been set:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,l.kt)("p",{parentName:"li"},"Check if there is a storage class with the name ",(0,l.kt)("inlineCode",{parentName:"p"},"local-path (default)")," in the NAME column:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4447],{3905:(e,t,a)=>{a.d(t,{Zo:()=>u,kt:()=>k});var n=a(7294);function l(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function r(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function o(e){for(var t=1;t=0||(l[a]=e[a]);return l}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(l[a]=e[a])}return l}var s=n.createContext({}),p=function(e){var t=n.useContext(s),a=t;return e&&(a="function"==typeof e?e(t):o(o({},t),e)),a},u=function(e){var t=p(e.components);return n.createElement(s.Provider,{value:t},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var a=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,u=i(e,["components","mdxType","originalType","parentName"]),m=p(a),d=l,k=m["".concat(s,".").concat(d)]||m[d]||c[d]||r;return a?n.createElement(k,o(o({ref:t},u),{},{components:a})):n.createElement(k,o({ref:t},u))}));function k(e,t){var a=arguments,l=t&&t.mdxType;if("string"==typeof e||l){var r=a.length,o=new Array(r);o[0]=d;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:l,o[1]=i;for(var p=2;p{a.r(t),a.d(t,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>p});var n=a(7462),l=(a(7294),a(3905));const r={title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},o=void 0,i={unversionedId:"setup-kubernetes/install-kubernetes-module",id:"version-1.0/setup-kubernetes/install-kubernetes-module",title:"5. Install Kubernetes Modules",description:"Install Helm, Kustomize",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-kubernetes-module",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/install-kubernetes-module.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. 
Install Kubernetes Modules",description:"Install Helm, Kustomize",sidebar_position:5,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.2. Minikube",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes/kubernetes-with-minikube"},next:{title:"6. (Optional) Setup GPU",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu"}},s={},p=[{value:"Setup Kubernetes Modules",id:"setup-kubernetes-modules",level:2},{value:"Helm",id:"helm",level:2},{value:"Kustomize",id:"kustomize",level:2},{value:"CSI Plugin : Local Path Provisioner",id:"csi-plugin--local-path-provisioner",level:2}],u={toc:p},m="wrapper";function c(e){let{components:t,...a}=e;return(0,l.kt)(m,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"setup-kubernetes-modules"},"Setup Kubernetes Modules"),(0,l.kt)("p",null,"On this page, we will explain how to install the modules that will be used on the cluster from the client nodes.",(0,l.kt)("br",{parentName:"p"}),"\n","All the processes introduced here will be done on the ",(0,l.kt)("strong",{parentName:"p"},"client nodes"),"."),(0,l.kt)("h2",{id:"helm"},"Helm"),(0,l.kt)("p",null,"Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download Helm version 3.7.1 into the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS refer to the ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/helm/helm/releases/tag/v3.7.1"},"official website")," for the download path of the binary that matches the OS and CPU of your client node."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip the file to use helm and move the file to its desired location."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf helm-v3.7.1-linux-amd64.tar.gz\nsudo mv linux-amd64/helm /usr/local/bin/helm\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check to see if the installation was successful:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"helm help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"The Kubernetes package manager\n\nCommon actions for Helm:\n")))),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm search: search for charts")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm pull: download a chart to your local directory to view")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm install: upload the chart to Kubernetes")),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"helm list: list releases of charts"),(0,l.kt)("p",{parentName:"li"},"Environment variables:"),(0,l.kt)("table",{parentName:"li"},(0,l.kt)("thead",{parentName:"table"},(0,l.kt)("tr",{parentName:"thead"},(0,l.kt)("th",{parentName:"tr",align:null},"Name"),(0,l.kt)("th",{parentName:"tr",align:null},"Description"))),(0,l.kt)("tbody",{parentName:"table"},(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CACHE_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing cached files.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_CONFIG_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm configuration.")),(0,l.kt)("tr",{parentName:"tbody"},(0,l.kt)("td",{parentName:"tr",align:null},"$HELM_DATA_HOME"),(0,l.kt)("td",{parentName:"tr",align:null},"set an alternative location for storing Helm data.")))),(0,l.kt)("p",{parentName:"li"},"..."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre"},"")))),(0,l.kt)("h2",{id:"kustomize"},"Kustomize"),(0,l.kt)("p",null,"Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once."),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"Download the binary version of kustomize v3.10.0 in the current folder.")),(0,l.kt)("ul",null,(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"For Linux amd64"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz\n"))),(0,l.kt)("li",{parentName:"ul"},(0,l.kt)("p",{parentName:"li"},"Other OS can be downloaded from ",(0,l.kt)("a",{parentName:"p",href:"https://github.com/kubernetes-sigs/kustomize/releases/tag/kustomize%2Fv3.10.0"},"kustomize/v3.10.0")," after checking."))),(0,l.kt)("ol",{start:2},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Unzip to use kustomize, and change the file location. 
"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz\nsudo mv kustomize /usr/local/bin/kustomize\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Check if it is installed correctly."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kustomize help\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following message, it means that it has been installed normally."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"Manages declarative configuration of Kubernetes.\nSee https://sigs.k8s.io/kustomize\n\nUsage:\n kustomize [command]\n\nAvailable Commands:\n build Print configuration per contents of kustomization.yaml\n cfg Commands for reading and writing configuration.\n completion Generate shell completion script\n create Create a new kustomization in the current directory\n edit Edits a kustomization file\n fn Commands for running functions against configuration.\n...\n")))),(0,l.kt)("h2",{id:"csi-plugin--local-path-provisioner"},"CSI Plugin : Local Path Provisioner"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"The CSI Plugin is a module that is responsible for storage within Kubernetes. Install the CSI Plugin, Local Path Provisioner, which is easy to use in single node clusters."),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml\n")),(0,l.kt)("p",{parentName:"li"},"If you see the following messages, it means that the installation was successful: "),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"namespace/local-path-storage created\nserviceaccount/local-path-provisioner-service-account created\nclusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created\nclusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created\ndeployment.apps/local-path-provisioner created\nstorageclass.storage.k8s.io/local-path created\nconfigmap/local-path-config created\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl -n local-path-storage get pod\n")))),(0,l.kt)("p",null,"If successful, it will display the following output:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nlocal-path-provisioner-d744ccf98-xfcbk 1/1 Running 0 7m\n")),(0,l.kt)("ol",{start:4},(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Execute the following command to change the default storage class:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl patch storageclass local-path -p \'{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}\'\n')),(0,l.kt)("p",{parentName:"li"},"If the command is successful, the following output will be displayed:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"storageclass.storage.k8s.io/local-path patched\n"))),(0,l.kt)("li",{parentName:"ol"},(0,l.kt)("p",{parentName:"li"},"Verify 
that the default storage class has been set:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get sc\n")),(0,l.kt)("p",{parentName:"li"},"Check if there is a storage class with the name ",(0,l.kt)("inlineCode",{parentName:"p"},"local-path (default)")," in the NAME column:"),(0,l.kt)("pre",{parentName:"li"},(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE\nlocal-path (default) rancher.io/local-path Delete WaitForFirstConsumer false 2h\n")))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/9c3963e5.1f415d9e.js b/en/assets/js/9c3963e5.6cb1685c.js similarity index 99% rename from en/assets/js/9c3963e5.1f415d9e.js rename to en/assets/js/9c3963e5.6cb1685c.js index 4cc45da1..87a13f38 100644 --- a/en/assets/js/9c3963e5.1f415d9e.js +++ b/en/assets/js/9c3963e5.6cb1685c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5878],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=i.createContext({}),d=function(e){var t=i.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},c=function(e){var t=d(e.components);return i.createElement(l.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},h=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=d(n),h=o,m=p["".concat(l,".").concat(h)]||p[h]||u[h]||a;return n?i.createElement(m,r(r({ref:t},c),{},{components:n})):i.createElement(m,r({ref:t},c))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,r=new Array(a);r[0]=h;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>d});var i=n(7462),o=(n(7294),n(3905));const a={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/levels",id:"introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/en/docs/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/levels.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/en/docs/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/en/docs/introduction/component"}},l={},d=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"Level 0: Manual Process",id:"level-0-manual-process",level:2},{value:"Level 1: Automated ML Pipeline",id:"level-1-automated-ml-pipeline",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"Level 2: Automating the CI/CD Pipeline",id:"level-2-automating-the-cicd-pipeline",level:2}],c={toc:d},p="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,i.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are."),(0,o.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,o.kt)("p",null,"Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google. "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"paper",src:n(3051).Z,width:"840",height:"638"})),(0,o.kt)("p",null,"The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning."),(0,o.kt)("p",null,"Google developed MLOps by evolving this paper and expanding the term. More details can be found on the ",(0,o.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"Google Cloud homepage"),". In this post, we will try to explain what Google means by MLOps."),(0,o.kt)("p",null,"Google divided the evolution of MLOps into three (0-2) stages. Before explaining each stage, let's review some of the concepts described in the previous post."),(0,o.kt)("p",null,"In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD."),(0,o.kt)("h2",{id:"level-0-manual-process"},"Level 0: Manual Process"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-0",src:n(344).Z,width:"1332",height:"494"})),(0,o.kt)("p",null,'At the 0th stage, two teams communicate through a "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operation team. The operation team then deploys the model delivered in this way.'),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"toon",src:n(4390).Z,width:"1282",height:"1746"})),(0,o.kt)("p",null,'Initial machine learning models are deployed through this "model" centered communication. However, there are several problems with this distribution method. 
For example, if some functions use Python 3.7 and some use Python 3.8, we often see the following situation.'),(0,o.kt)("p",null,"The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Python code"),(0,o.kt)("li",{parentName:"ol"},"Trained weights"),(0,o.kt)("li",{parentName:"ol"},"Environment (Packages, versions)")),(0,o.kt)("p",null,"If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used."),(0,o.kt)("p",null,"In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed features increases and communication becomes more challenging, it becomes difficult to deploy models with better performance quickly."),(0,o.kt)("h2",{id:"level-1-automated-ml-pipeline"},"Level 1: Automated ML Pipeline"),(0,o.kt)("h3",{id:"pipeline"},"Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-pipeline",src:n(7008).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,'So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn\'t work due to differences in the environment.'),(0,o.kt)("p",null,'However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.'),(0,o.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-ct.png",src:n(7689).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,"And the concept of Continuous Training (CT) is added. So why is CT necessary?"),(0,o.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,o.kt)("p",null,'In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.'),(0,o.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,o.kt)("p",null,"However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot."),(0,o.kt)("p",null,"For example, in an automotive production line, a model A was created and used for predictions. If an entirely different model B is introduced, it represents unseen data patterns, and a new model is trained for model B."),(0,o.kt)("p",null,'Now, the model will make predictions for model B. 
However, if the data switches back to model A, what should be done?\nIf there are only retraining rules, a new model for model A will be trained again. However, machine learning models require a sufficient amount of data to demonstrate satisfactory performance. The term "blind spot" refers to a period in which the model does not work while gathering enough data.'),(0,o.kt)("p",null,"There is a simple solution to address this blind spot. It involves checking whether there was a previous model for model A and, if so, using the previous model for prediction instead of immediately training a new model. This way, using meta-data associated with the model to automatically switch models is known as Auto Deploy."),(0,o.kt)("p",null,"To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously."),(0,o.kt)("h2",{id:"level-2-automating-the-cicd-pipeline"},"Level 2: Automating the CI/CD Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-2",src:n(5284).Z,width:"1356",height:"862"})),(0,o.kt)("p",null,"The title of Step 2 is the automation of CI and CD. In DevOps, the focus of CI/CD is on source code. So what is the focus of CI/CD in MLOps?"),(0,o.kt)("p",null,"In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline."),(0,o.kt)("p",null,"Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training."),(0,o.kt)("p",null,"In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly."),(0,o.kt)("p",null,"In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. 
This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model."))}u.isMDXComponent=!0},344:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},7689:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},7008:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},5284:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},3051:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},4390:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5878],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>m});var i=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=i.createContext({}),d=function(e){var t=i.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},c=function(e){var t=d(e.components);return i.createElement(l.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},h=i.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=d(n),h=o,m=p["".concat(l,".").concat(h)]||p[h]||u[h]||a;return n?i.createElement(m,r(r({ref:t},c),{},{components:n})):i.createElement(m,r({ref:t},c))}));function m(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,r=new Array(a);r[0]=h;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>u,frontMatter:()=>a,metadata:()=>s,toc:()=>d});var i=n(7462),o=(n(7294),n(3905));const a={title:"2. Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/levels",id:"introduction/levels",title:"2. Levels of MLOps",description:"Levels of MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/levels.md",sourceDirName:"introduction",slug:"/introduction/levels",permalink:"/en/docs/introduction/levels",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/levels.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. 
Levels of MLOps",description:"Levels of MLOps",sidebar_position:2,date:"2021-12-03T00:00:00.000Z",lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. What is MLOps?",permalink:"/en/docs/introduction/intro"},next:{title:"3. Components of MLOps",permalink:"/en/docs/introduction/component"}},l={},d=[{value:"Hidden Technical Debt in ML System",id:"hidden-technical-debt-in-ml-system",level:2},{value:"Level 0: Manual Process",id:"level-0-manual-process",level:2},{value:"Level 1: Automated ML Pipeline",id:"level-1-automated-ml-pipeline",level:2},{value:"Pipeline",id:"pipeline",level:3},{value:"Continuous Training",id:"continuous-training",level:3},{value:"Auto Retrain",id:"auto-retrain",level:4},{value:"Auto Deploy",id:"auto-deploy",level:4},{value:"Level 2: Automating the CI/CD Pipeline",id:"level-2-automating-the-cicd-pipeline",level:2}],c={toc:d},p="wrapper";function u(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,i.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are."),(0,o.kt)("h2",{id:"hidden-technical-debt-in-ml-system"},"Hidden Technical Debt in ML System"),(0,o.kt)("p",null,"Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google. "),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"paper",src:n(3051).Z,width:"840",height:"638"})),(0,o.kt)("p",null,"The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning."),(0,o.kt)("p",null,"Google developed MLOps by evolving this paper and expanding the term. More details can be found on the ",(0,o.kt)("a",{parentName:"p",href:"https://cloud.google.com/architecture/mlops-continuous-delivery-and-automation-pipelines-in-machine-learning"},"Google Cloud homepage"),". In this post, we will try to explain what Google means by MLOps."),(0,o.kt)("p",null,"Google divided the evolution of MLOps into three (0-2) stages. Before explaining each stage, let's review some of the concepts described in the previous post."),(0,o.kt)("p",null,"In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD."),(0,o.kt)("h2",{id:"level-0-manual-process"},"Level 0: Manual Process"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-0",src:n(344).Z,width:"1332",height:"494"})),(0,o.kt)("p",null,'At the 0th stage, two teams communicate through a "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operation team. The operation team then deploys the model delivered in this way.'),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"toon",src:n(4390).Z,width:"1282",height:"1746"})),(0,o.kt)("p",null,'Initial machine learning models are deployed through this "model" centered communication. However, there are several problems with this distribution method. 
For example, if some functions use Python 3.7 and some use Python 3.8, we often see the following situation.'),(0,o.kt)("p",null,"The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"Python code"),(0,o.kt)("li",{parentName:"ol"},"Trained weights"),(0,o.kt)("li",{parentName:"ol"},"Environment (Packages, versions)")),(0,o.kt)("p",null,"If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used."),(0,o.kt)("p",null,"In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed features increases and communication becomes more challenging, it becomes difficult to deploy models with better performance quickly."),(0,o.kt)("h2",{id:"level-1-automated-ml-pipeline"},"Level 1: Automated ML Pipeline"),(0,o.kt)("h3",{id:"pipeline"},"Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-pipeline",src:n(7008).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,'So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn\'t work due to differences in the environment.'),(0,o.kt)("p",null,'However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.'),(0,o.kt)("h3",{id:"continuous-training"},"Continuous Training"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-1-ct.png",src:n(7689).Z,width:"1356",height:"942"})),(0,o.kt)("p",null,"And the concept of Continuous Training (CT) is added. So why is CT necessary?"),(0,o.kt)("h4",{id:"auto-retrain"},"Auto Retrain"),(0,o.kt)("p",null,'In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.'),(0,o.kt)("h4",{id:"auto-deploy"},"Auto Deploy"),(0,o.kt)("p",null,"However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot."),(0,o.kt)("p",null,"For example, in an automotive production line, a model A was created and used for predictions. If an entirely different model B is introduced, it represents unseen data patterns, and a new model is trained for model B."),(0,o.kt)("p",null,'Now, the model will make predictions for model B. 
However, if the data switches back to model A, what should be done?\nIf there are only retraining rules, a new model for model A will be trained again. However, machine learning models require a sufficient amount of data to demonstrate satisfactory performance. The term "blind spot" refers to a period in which the model does not work while gathering enough data.'),(0,o.kt)("p",null,"There is a simple solution to address this blind spot. It involves checking whether there was a previous model for model A and, if so, using the previous model for prediction instead of immediately training a new model. This way, using meta-data associated with the model to automatically switch models is known as Auto Deploy."),(0,o.kt)("p",null,"To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously."),(0,o.kt)("h2",{id:"level-2-automating-the-cicd-pipeline"},"Level 2: Automating the CI/CD Pipeline"),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"level-2",src:n(5284).Z,width:"1356",height:"862"})),(0,o.kt)("p",null,"The title of Step 2 is the automation of CI and CD. In DevOps, the focus of CI/CD is on source code. So what is the focus of CI/CD in MLOps?"),(0,o.kt)("p",null,"In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline."),(0,o.kt)("p",null,"Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training."),(0,o.kt)("p",null,"In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly."),(0,o.kt)("p",null,"In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. 
This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model."))}u.isMDXComponent=!0},344:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-0-85b288b20c458e64055199fc50b1fe86.png"},7689:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-ct-a1ac90943bd5dd8e9af840cbcf51e985.png"},7008:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-1-pipeline-b2979b34d4804546ef4005cdf0f6311a.png"},5284:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/level-2-a4bb6a840eb99f33f3027217a5a04d8e.png"},3051:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/paper-67df32c03d5288f214c8cd189f85b2ea.png"},4390:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/toon-8ff2a8fb63a502a2b419a4cd459a7e41.png"}}]); \ No newline at end of file diff --git a/en/assets/js/9c5e90dd.c2076166.js b/en/assets/js/9c5e90dd.b47077fa.js similarity index 98% rename from en/assets/js/9c5e90dd.c2076166.js rename to en/assets/js/9c5e90dd.b47077fa.js index 6f4e8067..5f9260aa 100644 --- a/en/assets/js/9c5e90dd.c2076166.js +++ b/en/assets/js/9c5e90dd.b47077fa.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5867],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},p=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),u=s(n),p=o,b=u["".concat(c,".").concat(p)]||u[p]||d[p]||a;return n?r.createElement(b,i(i({ref:t},m),{},{components:n})):r.createElement(b,i({ref:t},m))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=p;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>b,contentTitle:()=>d,default:()=>y,frontMatter:()=>u,metadata:()=>p,toc:()=>h});var r=n(7462),o=n(7294),a=n(3905),i=n(5999);function l(e){let{className:t,name:n,children:r,githubUrl:a,linkedinUrl:i,role:l}=e;return o.createElement("div",{className:t},o.createElement("div",{className:"card card--full-height"},o.createElement("div",{className:"card__header"},o.createElement("div",{className:"avatar avatar--vertical"},o.createElement("img",{className:"avatar__photo avatar__photo--xl",src:`${a}.png`,alt:`${n}'s 
avatar`}),o.createElement("div",{className:"avatar__intro"},o.createElement("h3",{className:"avatar__name"},n)),o.createElement("div",{className:"avatar__role"},o.createElement("h5",{className:"avatar__role"},l)))),o.createElement("div",{className:"card__body"},r),o.createElement("div",{className:"card__footer"},o.createElement("div",{className:"button-group button-group--block"},a&&o.createElement("a",{className:"button button--secondary",href:a},"GitHub"),i&&o.createElement("a",{className:"button button--secondary",href:i},"LinkedIn")))))}function c(e){return o.createElement(l,(0,r.Z)({},e,{className:"col col--6 margin-bottom--lg"}))}function s(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongseob Jeon",githubUrl:"https://github.com/aiden-jeon",linkedinUrl:"https://www.linkedin.com/in/jongseob-jeon/",role:"Project Leader"},o.createElement(i.Z,{id:"team.profile.Jongseob Jeon.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ubaa8\ub450\uc758 \ub525\ub7ec\ub2dd\uc744 \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 \ub525\ub7ec\ub2dd\uc744 \uc27d\uac8c \uc811\ud588\ub4ef\uc774 \ubaa8\ub450\uc758 MLOps\ub97c \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 MLOps\uc5d0 \uc27d\uac8c \uc811\ud560\uc218 \uc788\uae38 \ubc14\ub78d\ub2c8\ub2e4.")),o.createElement(c,{name:"Jayeon Kim",githubUrl:"https://github.com/anencore94",linkedinUrl:"https://www.linkedin.com/in/anencore94",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Jaeyeon Kim.body"},"\ube44\ud6a8\uc728\uc801\uc778 \uc791\uc5c5\uc744 \uc790\ub3d9\ud654\ud558\ub294 \uac83\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngchel Jang",githubUrl:"https://github.com/zamonia500",linkedinUrl:"https://www.linkedin.com/in/youngcheol-jang-b04a45187",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Youngchel Jang.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub2e8\uc21c\ud558\uac8c \uc0dd\uac01\ud558\ub294 \ub178\ub825\uc744 \ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")))}function m(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongsun Shinn",githubUrl:"https://github.com/jsshinn",linkedinUrl:"https://www.linkedin.com/in/jongsun-shinn-311b00140/"},o.createElement(i.Z,{id:"team.profile.Jongsun Shinn.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c ML Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Sangwoo Shim",githubUrl:"https://github.com/borishim",linkedinUrl:"https://www.linkedin.com/in/sangwooshim/"},o.createElement(i.Z,{id:"team.profile.Sangwoo Shim.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c CTO\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub9c8\ud0a4\ub098\ub77d\uc2a4\ub294 \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc0b0\uc5c5\uc6a9 AI \uc194\ub8e8\uc158\uc744 \uac1c\ubc1c\ud558\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc785\ub2c8\ub2e4. 
\uc0b0\uc5c5 \ud604\uc7a5\uc758 \ubb38\uc81c \ud574\uacb0\uc744 \ud1b5\ud574 \uc0ac\ub78c\uc774 \ubcf8\uc5f0\uc758 \uc77c\uc5d0 \uc9d1\uc911\ud560 \uc218 \uc788\uac8c \ub9cc\ub4dc\ub294 \uac83, \uadf8\uac83\uc774 \uc6b0\ub9ac\uac00 \ud558\ub294 \uc77c\uc785\ub2c8\ub2e4.")),o.createElement(c,{name:"Seunghyun Ko",githubUrl:"https://github.com/kosehy",linkedinUrl:"https://www.linkedin.com/in/seunghyunko/"},o.createElement(i.Z,{id:"team.profile.Seunghyun Ko.body"},"3i\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. kubeflow\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"SeungTae Kim",githubUrl:"https://github.com/RyanKor",linkedinUrl:"https://www.linkedin.com/in/seung-tae-kim-3bb15715b/"},o.createElement(i.Z,{id:"team.profile.SeungTae Kim.body"},"Genesis Lab\uc774\ub77c\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc5d0\uc11c Applied AI Engineer \uc778\ud134 \uc5c5\ubb34\ub97c \uc218\ud589\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc0dd\ud0dc\uacc4\uac00 \uc6b0\ub9ac \uc0b0\uc5c5 \uc804\ubc18\uc5d0 \ud070 \ubcc0\ud654\uc744 \uac00\uc838\uc62c \uac83\uc774\ub77c \ubbff\uc73c\uba70, \ud55c \uac78\uc74c\uc529 \ub098\uc544\uac00\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngdon Tae",githubUrl:"https://github.com/taepd",linkedinUrl:"https://www.linkedin.com/in/taepd/"},o.createElement(i.Z,{id:"team.profile.Youngdon Tae.body"},"\ubc31\ud328\ucee4\uc5d0\uc11c ML \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uc790\uc5f0\uc5b4\ucc98\ub9ac, \ucd94\ucc9c\uc2dc\uc2a4\ud15c, MLOps\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")))}const u={sidebar_position:3},d="Contributors",p={unversionedId:"contributors",id:"contributors",title:"Contributors",description:"Main Authors",source:"@site/community/contributors.md",sourceDirName:".",slug:"/contributors",permalink:"/en/community/contributors",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/contributors.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{sidebar_position:3},sidebar:"tutorialSidebar",previous:{title:"How to Contribute",permalink:"/en/community/how-to-contribute"}},b={},h=[{value:"Main Authors",id:"main-authors",level:2},{value:"Contributors",id:"contributors-1",level:2}],g={toc:h},f="wrapper";function y(e){let{components:t,...n}=e;return(0,a.kt)(f,(0,r.Z)({},g,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"contributors"},"Contributors"),(0,a.kt)("h2",{id:"main-authors"},"Main Authors"),(0,a.kt)(s,{mdxType:"MainAuthorRow"}),(0,a.kt)("h2",{id:"contributors-1"},"Contributors"),(0,a.kt)("p",null,"Thank you for contributing our tutorials!"),(0,a.kt)(m,{mdxType:"ContributorsRow"}))}y.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5867],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function i(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var c=r.createContext({}),s=function(e){var t=r.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},m=function(e){var t=s(e.components);return r.createElement(c.Provider,{value:t},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},p=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,a=e.originalType,c=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),u=s(n),p=o,b=u["".concat(c,".").concat(p)]||u[p]||d[p]||a;return n?r.createElement(b,i(i({ref:t},m),{},{components:n})):r.createElement(b,i({ref:t},m))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=n.length,i=new Array(a);i[0]=p;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[u]="string"==typeof e?e:o,i[1]=l;for(var s=2;s{n.r(t),n.d(t,{assets:()=>b,contentTitle:()=>d,default:()=>y,frontMatter:()=>u,metadata:()=>p,toc:()=>h});var r=n(7462),o=n(7294),a=n(3905),i=n(5999);function l(e){let{className:t,name:n,children:r,githubUrl:a,linkedinUrl:i,role:l}=e;return o.createElement("div",{className:t},o.createElement("div",{className:"card card--full-height"},o.createElement("div",{className:"card__header"},o.createElement("div",{className:"avatar avatar--vertical"},o.createElement("img",{className:"avatar__photo avatar__photo--xl",src:`${a}.png`,alt:`${n}'s avatar`}),o.createElement("div",{className:"avatar__intro"},o.createElement("h3",{className:"avatar__name"},n)),o.createElement("div",{className:"avatar__role"},o.createElement("h5",{className:"avatar__role"},l)))),o.createElement("div",{className:"card__body"},r),o.createElement("div",{className:"card__footer"},o.createElement("div",{className:"button-group button-group--block"},a&&o.createElement("a",{className:"button button--secondary",href:a},"GitHub"),i&&o.createElement("a",{className:"button button--secondary",href:i},"LinkedIn")))))}function c(e){return o.createElement(l,(0,r.Z)({},e,{className:"col col--6 margin-bottom--lg"}))}function s(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongseob Jeon",githubUrl:"https://github.com/aiden-jeon",linkedinUrl:"https://www.linkedin.com/in/jongseob-jeon/",role:"Project Leader"},o.createElement(i.Z,{id:"team.profile.Jongseob Jeon.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c \uba38\uc2e0\ub7ec\ub2dd \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. 
\ubaa8\ub450\uc758 \ub525\ub7ec\ub2dd\uc744 \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 \ub525\ub7ec\ub2dd\uc744 \uc27d\uac8c \uc811\ud588\ub4ef\uc774 \ubaa8\ub450\uc758 MLOps\ub97c \ud1b5\ud574 \ub9ce\uc740 \uc0ac\ub78c\ub4e4\uc774 MLOps\uc5d0 \uc27d\uac8c \uc811\ud560\uc218 \uc788\uae38 \ubc14\ub78d\ub2c8\ub2e4.")),o.createElement(c,{name:"Jayeon Kim",githubUrl:"https://github.com/anencore94",linkedinUrl:"https://www.linkedin.com/in/anencore94",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Jaeyeon Kim.body"},"\ube44\ud6a8\uc728\uc801\uc778 \uc791\uc5c5\uc744 \uc790\ub3d9\ud654\ud558\ub294 \uac83\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngchel Jang",githubUrl:"https://github.com/zamonia500",linkedinUrl:"https://www.linkedin.com/in/youngcheol-jang-b04a45187",role:"Project Member"},o.createElement(i.Z,{id:"team.profile.Youngchel Jang.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub2e8\uc21c\ud558\uac8c \uc0dd\uac01\ud558\ub294 \ub178\ub825\uc744 \ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")))}function m(){return o.createElement("div",{className:"row"},o.createElement(c,{name:"Jongsun Shinn",githubUrl:"https://github.com/jsshinn",linkedinUrl:"https://www.linkedin.com/in/jongsun-shinn-311b00140/"},o.createElement(i.Z,{id:"team.profile.Jongsun Shinn.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c ML Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Sangwoo Shim",githubUrl:"https://github.com/borishim",linkedinUrl:"https://www.linkedin.com/in/sangwooshim/"},o.createElement(i.Z,{id:"team.profile.Sangwoo Shim.body"},"\ub9c8\ud0a4\ub098\ub77d\uc2a4\uc5d0\uc11c CTO\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ub9c8\ud0a4\ub098\ub77d\uc2a4\ub294 \uba38\uc2e0\ub7ec\ub2dd \uae30\ubc18\uc758 \uc0b0\uc5c5\uc6a9 AI \uc194\ub8e8\uc158\uc744 \uac1c\ubc1c\ud558\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc785\ub2c8\ub2e4. \uc0b0\uc5c5 \ud604\uc7a5\uc758 \ubb38\uc81c \ud574\uacb0\uc744 \ud1b5\ud574 \uc0ac\ub78c\uc774 \ubcf8\uc5f0\uc758 \uc77c\uc5d0 \uc9d1\uc911\ud560 \uc218 \uc788\uac8c \ub9cc\ub4dc\ub294 \uac83, \uadf8\uac83\uc774 \uc6b0\ub9ac\uac00 \ud558\ub294 \uc77c\uc785\ub2c8\ub2e4.")),o.createElement(c,{name:"Seunghyun Ko",githubUrl:"https://github.com/kosehy",linkedinUrl:"https://www.linkedin.com/in/seunghyunko/"},o.createElement(i.Z,{id:"team.profile.Seunghyun Ko.body"},"3i\uc5d0\uc11c MLOps Engineer\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. kubeflow\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"SeungTae Kim",githubUrl:"https://github.com/RyanKor",linkedinUrl:"https://www.linkedin.com/in/seung-tae-kim-3bb15715b/"},o.createElement(i.Z,{id:"team.profile.SeungTae Kim.body"},"Genesis Lab\uc774\ub77c\ub294 \uc2a4\ud0c0\ud2b8\uc5c5\uc5d0\uc11c Applied AI Engineer \uc778\ud134 \uc5c5\ubb34\ub97c \uc218\ud589\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \uba38\uc2e0\ub7ec\ub2dd \uc0dd\ud0dc\uacc4\uac00 \uc6b0\ub9ac \uc0b0\uc5c5 \uc804\ubc18\uc5d0 \ud070 \ubcc0\ud654\uc744 \uac00\uc838\uc62c \uac83\uc774\ub77c \ubbff\uc73c\uba70, \ud55c \uac78\uc74c\uc529 \ub098\uc544\uac00\uace0 \uc788\uc2b5\ub2c8\ub2e4.")),o.createElement(c,{name:"Youngdon Tae",githubUrl:"https://github.com/taepd",linkedinUrl:"https://www.linkedin.com/in/taepd/"},o.createElement(i.Z,{id:"team.profile.Youngdon Tae.body"},"\ubc31\ud328\ucee4\uc5d0\uc11c ML \uc5d4\uc9c0\ub2c8\uc5b4\ub85c \uc77c\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. 
\uc790\uc5f0\uc5b4\ucc98\ub9ac, \ucd94\ucc9c\uc2dc\uc2a4\ud15c, MLOps\uc5d0 \uad00\uc2ec\uc774 \ub9ce\uc2b5\ub2c8\ub2e4.")))}const u={sidebar_position:3},d="Contributors",p={unversionedId:"contributors",id:"contributors",title:"Contributors",description:"Main Authors",source:"@site/community/contributors.md",sourceDirName:".",slug:"/contributors",permalink:"/en/community/contributors",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/contributors.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{sidebar_position:3},sidebar:"tutorialSidebar",previous:{title:"How to Contribute",permalink:"/en/community/how-to-contribute"}},b={},h=[{value:"Main Authors",id:"main-authors",level:2},{value:"Contributors",id:"contributors-1",level:2}],g={toc:h},f="wrapper";function y(e){let{components:t,...n}=e;return(0,a.kt)(f,(0,r.Z)({},g,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"contributors"},"Contributors"),(0,a.kt)("h2",{id:"main-authors"},"Main Authors"),(0,a.kt)(s,{mdxType:"MainAuthorRow"}),(0,a.kt)("h2",{id:"contributors-1"},"Contributors"),(0,a.kt)("p",null,"Thank you for contributing our tutorials!"),(0,a.kt)(m,{mdxType:"ContributorsRow"}))}y.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/a1ee4268.4867f838.js b/en/assets/js/a1ee4268.a54c7622.js similarity index 98% rename from en/assets/js/a1ee4268.4867f838.js rename to en/assets/js/a1ee4268.a54c7622.js index 496fa338..220f65c5 100644 --- a/en/assets/js/a1ee4268.4867f838.js +++ b/en/assets/js/a1ee4268.a54c7622.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6614],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>h});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function a(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):a(a({},n),e)),t},d=function(e){var n=p(e.components);return r.createElement(s.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),m=o,h=c["".concat(s,".").concat(m)]||c[m]||u[m]||i;return t?r.createElement(h,a(a({ref:n},d),{},{components:t})):r.createElement(h,a({ref:n},d))}));function h(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var i=t.length,a=new Array(i);a[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>a,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=t(7462),o=(t(7294),t(3905));const i={title:"1. 
What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,l={unversionedId:"api-deployment/what-is-api-deployment",id:"version-1.0/api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/en/docs/1.0/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/1.0/api-deployment/seldon-iris"}},s={},p=[{value:"What is API Deployment?",id:"what-is-api-deployment",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],d={toc:p},c="wrapper";function u(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,r.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"what-is-api-deployment"},"What is API Deployment?"),(0,o.kt)("p",null,"After training a machine learning model, how should it be used? When training a machine learning model, you expect a model with higher performance to come out, but when you infer with the trained model, you want to get the inference results quickly and easily."),(0,o.kt)("p",null,"When you want to check the inference results of the model, you can load the trained model and infer through a Jupyter notebook or a Python script. However, this method becomes inefficient as the model gets bigger, and you can only use the model in the environment where the trained model exists and cannot be used by many people."),(0,o.kt)("p",null,"Therefore, when machine learning is used in actual services, it uses an API to use the trained model. The model is loaded only once in the environment where the API server is running, and you can easily get the inference results using DNS, and you can also link it with other services."),(0,o.kt)("p",null,"However, there is a lot of ancillary work necessary to make the model into an API. In order to make it easier to make an API, machine learning frameworks such as Tensorflow have developed inference engines."),(0,o.kt)("p",null,"Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. 
When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response."),(0,o.kt)("p",null,"Some well-known open-source inference engines include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow: Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch: Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"ONNX: ONNX Runtime"))),(0,o.kt)("p",null,"While not officially supported in open-source, there are also inference engines developed for popular frameworks like sklearn and XGBoost."),(0,o.kt)("p",null,"Deploying and serving the model's inference results through an API is called ",(0,o.kt)("strong",{parentName:"p"},"API deployment"),"."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"I introduced the fact that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. Additionally, we may need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and updating the version when an improved model is available. There are many considerations when operating an inference engine, and it goes beyond just a few tasks."),(0,o.kt)("p",null,"To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment."),(0,o.kt)("p",null,"Some popular serving frameworks include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,"In ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", we use Seldon Core to demonstrate the process of API deployment."))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6614],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>h});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function a(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):a(a({},n),e)),t},d=function(e){var n=p(e.components);return 
r.createElement(s.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),m=o,h=c["".concat(s,".").concat(m)]||c[m]||u[m]||i;return t?r.createElement(h,a(a({ref:n},d),{},{components:t})):r.createElement(h,a({ref:n},d))}));function h(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var i=t.length,a=new Array(i);a[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>a,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=t(7462),o=(t(7294),t(3905));const i={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,l={unversionedId:"api-deployment/what-is-api-deployment",id:"version-1.0/api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/what-is-api-deployment.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/en/docs/1.0/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/1.0/api-deployment/seldon-iris"}},s={},p=[{value:"What is API Deployment?",id:"what-is-api-deployment",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],d={toc:p},c="wrapper";function u(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,r.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"what-is-api-deployment"},"What is API Deployment?"),(0,o.kt)("p",null,"After training a machine learning model, how should it be used? When training a machine learning model, you expect a model with higher performance to come out, but when you infer with the trained model, you want to get the inference results quickly and easily."),(0,o.kt)("p",null,"When you want to check the inference results of the model, you can load the trained model and infer through a Jupyter notebook or a Python script. However, this method becomes inefficient as the model gets bigger, and you can only use the model in the environment where the trained model exists and cannot be used by many people."),(0,o.kt)("p",null,"Therefore, when machine learning is used in actual services, it uses an API to use the trained model. The model is loaded only once in the environment where the API server is running, and you can easily get the inference results using DNS, and you can also link it with other services."),(0,o.kt)("p",null,"However, there is a lot of ancillary work necessary to make the model into an API. 
In order to make it easier to make an API, machine learning frameworks such as Tensorflow have developed inference engines."),(0,o.kt)("p",null,"Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response."),(0,o.kt)("p",null,"Some well-known open-source inference engines include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow: Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch: Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"ONNX: ONNX Runtime"))),(0,o.kt)("p",null,"While not officially supported in open-source, there are also inference engines developed for popular frameworks like sklearn and XGBoost."),(0,o.kt)("p",null,"Deploying and serving the model's inference results through an API is called ",(0,o.kt)("strong",{parentName:"p"},"API deployment"),"."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"I introduced the fact that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. Additionally, we may need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and updating the version when an improved model is available. 
There are many considerations when operating an inference engine, and it goes beyond just a few tasks."),(0,o.kt)("p",null,"To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment."),(0,o.kt)("p",null,"Some popular serving frameworks include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,"In ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", we use Seldon Core to demonstrate the process of API deployment."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/a243e695.1102efb3.js b/en/assets/js/a243e695.145dcdc6.js similarity index 99% rename from en/assets/js/a243e695.1102efb3.js rename to en/assets/js/a243e695.145dcdc6.js index 491159c5..8b09075c 100644 --- a/en/assets/js/a243e695.1102efb3.js +++ b/en/assets/js/a243e695.145dcdc6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5597],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>u});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),d=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return a.createElement(l.Provider,{value:t},e.children)},c="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=d(n),m=o,u=c["".concat(l,".").concat(m)]||c[m]||h[m]||i;return n?a.createElement(u,r(r({ref:t},p),{},{components:n})):a.createElement(u,r({ref:t},p))}));function u(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>s,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/intro",id:"version-1.0/introduction/intro",title:"1. 
What is MLOps?",description:"Introduction to MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/en/docs/1.0/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. Levels of MLOps",permalink:"/en/docs/1.0/introduction/levels"}},l={},d=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML + Ops",id:"1-ml--ops",level:3},{value:"Rule-Based Approach",id:"rule-based-approach",level:4},{value:"Machine Learning Approach",id:"machine-learning-approach",level:4},{value:"Deep Learning Approach",id:"deep-learning-approach",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) Conclusion",id:"3-conclusion",level:3}],p={toc:d},c="wrapper";function h(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,a.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,o.kt)("p",null,"Since 2012, when Alexnet was introduced, Machine Learning and Deep Learning have been introduced in any domain where data exists, such as Computer Vision and Natural Language Processing. Deep Learning and Machine Learning were referred to collectively as AI, and the need for AI was shouted from many media. And many companies conducted numerous projects using Machine Learning and Deep Learning. But what was the result? Byungchan Eum, the Head of North East Asia at Element AI, said \u201cIf 10 companies start an AI project, 9 of them will only be able to do concept validation (POC)\u201d."),(0,o.kt)("p",null,"In this way, in many projects, Machine Learning and Deep Learning only showed the possibility that they could solve this problem and then disappeared. And around this time, the outlook that ",(0,o.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI Winter was coming again")," also began to emerge."),(0,o.kt)("p",null,"Why did most projects end at the concept validation (POC) stage? Because it is impossible to operate an actual service with only Machine Learning and Deep Learning code."),(0,o.kt)("p",null,"At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper ",(0,o.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),". However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention. 
"),(0,o.kt)("p",null,"And after a few years, machine learning and deep learning had proven their potential and people were now looking to apply it to actual services. However, soon many people realized that actual services were not as easy as they thought."),(0,o.kt)("h2",{id:"devops"},"Devops"),(0,o.kt)("p",null,"MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps."),(0,o.kt)("h3",{id:"devops-1"},"DevOps"),(0,o.kt)("p",null,'DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.'),(0,o.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,o.kt)("p",null,"Let's explore why DevOps is necessary through a simple scenario."),(0,o.kt)("p",null,"In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service."),(0,o.kt)("p",null,"However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team."),(0,o.kt)("p",null,"When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"silo",src:n(1598).Z,width:"892",height:"498"})),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},'Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. Silos are designed to keep the stored materials separate and prevent them from mixing.\nIn the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.')),(0,o.kt)("p",null,"The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. 
By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations."),(0,o.kt)("h3",{id:"cicd"},"CI/CD"),(0,o.kt)("p",null,"Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cicd",src:n(7204).Z,width:"1400",height:"299"})),(0,o.kt)("p",null,"Through this method, the development team can understand the operational environment and check whether the features being developed can be seamlessly deployed. The operations team can deploy validated features or improved products more often to increase customer product experience. In summary, DevOps is a methodology to solve the problem between development teams and operations teams."),(0,o.kt)("h2",{id:"mlops"},"MLOps"),(0,o.kt)("h3",{id:"1-ml--ops"},"1) ML + Ops"),(0,o.kt)("p",null,"DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience."),(0,o.kt)("p",null,'MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.'),(0,o.kt)("p",null,"MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system."),(0,o.kt)("h4",{id:"rule-based-approach"},"Rule-Based Approach"),(0,o.kt)("p",null,"In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification."),(0,o.kt)("h4",{id:"machine-learning-approach"},"Machine Learning Approach"),(0,o.kt)("p",null,"As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. In this case, the models are periodically retrained and redeployed."),(0,o.kt)("h4",{id:"deep-learning-approach"},"Deep Learning Approach"),(0,o.kt)("p",null,"When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed."),(0,o.kt)("p",null,"By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. 
MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"graph",src:n(2276).Z,width:"752",height:"582"})),(0,o.kt)("p",null,"If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend where the model performance improves as the complexity increases. This often leads to the emergence of separate machine learning teams specializing in transitioning from traditional machine learning to deep learning."),(0,o.kt)("p",null,"If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge."),(0,o.kt)("p",null,"Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively."),(0,o.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,o.kt)("p",null,"However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects."),(0,o.kt)("p",null,"In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary."),(0,o.kt)("h3",{id:"3-conclusion"},"3) Conclusion"),(0,o.kt)("p",null,"In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. 
On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models."))}h.isMDXComponent=!0},7204:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},2276:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},1598:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5597],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>u});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),d=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return a.createElement(l.Provider,{value:t},e.children)},c="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=d(n),m=o,u=c["".concat(l,".").concat(m)]||c[m]||h[m]||i;return n?a.createElement(u,r(r({ref:t},p),{},{components:n})):a.createElement(u,r({ref:t},p))}));function u(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>s,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/intro",id:"version-1.0/introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/en/docs/1.0/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. 
Levels of MLOps",permalink:"/en/docs/1.0/introduction/levels"}},l={},d=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML + Ops",id:"1-ml--ops",level:3},{value:"Rule-Based Approach",id:"rule-based-approach",level:4},{value:"Machine Learning Approach",id:"machine-learning-approach",level:4},{value:"Deep Learning Approach",id:"deep-learning-approach",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) Conclusion",id:"3-conclusion",level:3}],p={toc:d},c="wrapper";function h(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,a.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,o.kt)("p",null,"Since 2012, when Alexnet was introduced, Machine Learning and Deep Learning have been introduced in any domain where data exists, such as Computer Vision and Natural Language Processing. Deep Learning and Machine Learning were referred to collectively as AI, and the need for AI was shouted from many media. And many companies conducted numerous projects using Machine Learning and Deep Learning. But what was the result? Byungchan Eum, the Head of North East Asia at Element AI, said \u201cIf 10 companies start an AI project, 9 of them will only be able to do concept validation (POC)\u201d."),(0,o.kt)("p",null,"In this way, in many projects, Machine Learning and Deep Learning only showed the possibility that they could solve this problem and then disappeared. And around this time, the outlook that ",(0,o.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI Winter was coming again")," also began to emerge."),(0,o.kt)("p",null,"Why did most projects end at the concept validation (POC) stage? Because it is impossible to operate an actual service with only Machine Learning and Deep Learning code."),(0,o.kt)("p",null,"At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper ",(0,o.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),". However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention. "),(0,o.kt)("p",null,"And after a few years, machine learning and deep learning had proven their potential and people were now looking to apply it to actual services. However, soon many people realized that actual services were not as easy as they thought."),(0,o.kt)("h2",{id:"devops"},"Devops"),(0,o.kt)("p",null,"MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps."),(0,o.kt)("h3",{id:"devops-1"},"DevOps"),(0,o.kt)("p",null,'DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. 
It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.'),(0,o.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,o.kt)("p",null,"Let's explore why DevOps is necessary through a simple scenario."),(0,o.kt)("p",null,"In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service."),(0,o.kt)("p",null,"However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team."),(0,o.kt)("p",null,"When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"silo",src:n(1598).Z,width:"892",height:"498"})),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},'Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. Silos are designed to keep the stored materials separate and prevent them from mixing.\nIn the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.')),(0,o.kt)("p",null,"The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations."),(0,o.kt)("h3",{id:"cicd"},"CI/CD"),(0,o.kt)("p",null,"Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cicd",src:n(7204).Z,width:"1400",height:"299"})),(0,o.kt)("p",null,"Through this method, the development team can understand the operational environment and check whether the features being developed can be seamlessly deployed. 
The operations team can deploy validated features or improved products more often to increase customer product experience. In summary, DevOps is a methodology to solve the problem between development teams and operations teams."),(0,o.kt)("h2",{id:"mlops"},"MLOps"),(0,o.kt)("h3",{id:"1-ml--ops"},"1) ML + Ops"),(0,o.kt)("p",null,"DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience."),(0,o.kt)("p",null,'MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.'),(0,o.kt)("p",null,"MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system."),(0,o.kt)("h4",{id:"rule-based-approach"},"Rule-Based Approach"),(0,o.kt)("p",null,"In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification."),(0,o.kt)("h4",{id:"machine-learning-approach"},"Machine Learning Approach"),(0,o.kt)("p",null,"As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. In this case, the models are periodically retrained and redeployed."),(0,o.kt)("h4",{id:"deep-learning-approach"},"Deep Learning Approach"),(0,o.kt)("p",null,"When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed."),(0,o.kt)("p",null,"By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"graph",src:n(2276).Z,width:"752",height:"582"})),(0,o.kt)("p",null,"If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend where the model performance improves as the complexity increases. This often leads to the emergence of separate machine learning teams specializing in transitioning from traditional machine learning to deep learning."),(0,o.kt)("p",null,"If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. 
However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge."),(0,o.kt)("p",null,"Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively."),(0,o.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,o.kt)("p",null,"However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects."),(0,o.kt)("p",null,"In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary."),(0,o.kt)("h3",{id:"3-conclusion"},"3) Conclusion"),(0,o.kt)("p",null,"In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models."))}h.isMDXComponent=!0},7204:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},2276:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},1598:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file diff --git a/en/assets/js/a6269ae6.d9fc6419.js b/en/assets/js/a6269ae6.9bd76b1d.js similarity index 99% rename from en/assets/js/a6269ae6.d9fc6419.js rename to en/assets/js/a6269ae6.9bd76b1d.js index f5d13817..4d7b8c9e 100644 --- a/en/assets/js/a6269ae6.d9fc6419.js +++ b/en/assets/js/a6269ae6.9bd76b1d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7977],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>k});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),u=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},c=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return 
n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(a),m=r,k=p["".concat(i,".").concat(m)]||p[m]||d[m]||o;return a?n.createElement(k,l(l({ref:t},c),{},{components:a})):n.createElement(k,l({ref:t},c))}));function k(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:r,l[1]=s;for(var u=2;u{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>u});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-prerequisite.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},u=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],c={toc:u},p="wrapper";function d(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"On this page, we describe the components that need to be installed or configured on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,r.kt)("strong",{parentName:"p"},"Client")," prior to installing Kubernetes."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. 
To enable Port-Forwarding, the following packages need to be installed on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install apt packages for docker."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"add docker official GPG key."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"When installing Docker using the apt package manager, configure it to retrieve from the stable repository:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check the currently available Docker versions for installation:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"Verify if the version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," is listed among the output:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"If the addition was successful, the following output will be displayed:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install Docker version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal"),":"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check docker is installed."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")))),(0,r.kt)("p",null," If added successfully, it will output as follows:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. 
The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')),(0,r.kt)("ol",{start:7},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Add permissions to use Docker commands without the ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo")," keyword by executing the following commands:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"To verify that you can now use Docker commands without ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo"),", run the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command again:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message after executing the command, it means that the permissions have been successfully added:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"In order for kubelet to work properly, ",(0,r.kt)("strong",{parentName:"p"},"cluster")," nodes must turn off the virtual memory called swap. The following command turns off the swap.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(When using cluster and client on the same desktop, turning off swap memory may result in a slowdown in speed)")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl is a client tool used to make API requests to a Kubernetes cluster. 
It needs to be installed on the client node."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Download kubectl version v1.21.7 to the current folder:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Change the file permissions and move it to the appropriate location to make kubectl executable:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Verify that kubectl is installed correctly:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message, it means that kubectl is installed successfully:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, you can refer to the following resources:"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"Configuring Multiple kubeconfig on Your Machine")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"kubectx - Switch between Kubernetes contexts easily"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/"},"Install and Set Up kubectl on Linux"))))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7977],{3905:(e,t,a)=>{a.d(t,{Zo:()=>c,kt:()=>k});var n=a(7294);function r(e,t,a){return t in e?Object.defineProperty(e,t,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[t]=a,e}function o(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}function l(e){for(var t=1;t=0||(r[a]=e[a]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var i=n.createContext({}),u=function(e){var t=n.useContext(i),a=t;return e&&(a="function"==typeof e?e(t):l(l({},t),e)),a},c=function(e){var t=u(e.components);return n.createElement(i.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var 
a=e.components,r=e.mdxType,o=e.originalType,i=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(a),m=r,k=p["".concat(i,".").concat(m)]||p[m]||d[m]||o;return a?n.createElement(k,l(l({ref:t},c),{},{components:a})):n.createElement(k,l({ref:t},c))}));function k(e,t){var a=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=a.length,l=new Array(o);l[0]=m;var s={};for(var i in t)hasOwnProperty.call(t,i)&&(s[i]=t[i]);s.originalType=e,s[p]="string"==typeof e?e:r,l[1]=s;for(var u=2;u{a.r(t),a.d(t,{assets:()=>i,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>s,toc:()=>u});var n=a(7462),r=(a(7294),a(3905));const o={title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},l=void 0,s={unversionedId:"setup-kubernetes/install-prerequisite",id:"setup-kubernetes/install-prerequisite",title:"3. Install Prerequisite",description:"Install docker",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-prerequisite.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/install-prerequisite",permalink:"/en/docs/setup-kubernetes/install-prerequisite",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-prerequisite.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Install Prerequisite",description:"Install docker",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Sangwoo Shim"]},sidebar:"tutorialSidebar",previous:{title:"2. Setup Kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes"},next:{title:"4.1. K3s",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-k3s"}},i={},u=[{value:"Install apt packages",id:"install-apt-packages",level:2},{value:"Install Docker",id:"install-docker",level:2},{value:"Turn off Swap Memory",id:"turn-off-swap-memory",level:2},{value:"Install Kubectl",id:"install-kubectl",level:2},{value:"References",id:"references",level:2}],c={toc:u},p="wrapper";function d(e){let{components:t,...a}=e;return(0,r.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"On this page, we describe the components that need to be installed or configured on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,r.kt)("strong",{parentName:"p"},"Client")," prior to installing Kubernetes."),(0,r.kt)("h2",{id:"install-apt-packages"},"Install apt packages"),(0,r.kt)("p",null,"In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. 
To enable Port-Forwarding, the following packages need to be installed on the ",(0,r.kt)("strong",{parentName:"p"},"Cluster"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update\nsudo apt-get install -y socat\n")),(0,r.kt)("h2",{id:"install-docker"},"Install Docker"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install apt packages for docker."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"add docker official GPG key."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"When installing Docker using the apt package manager, configure it to retrieve from the stable repository:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'echo \\\n"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \\\n$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check the currently available Docker versions for installation:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get update && apt-cache madison docker-ce\n")),(0,r.kt)("p",{parentName:"li"},"Verify if the version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal")," is listed among the output:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal\n")),(0,r.kt)("p",{parentName:"li"},"If the addition was successful, the following output will be displayed:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Install Docker version ",(0,r.kt)("inlineCode",{parentName:"p"},"5:20.10.11~3-0~ubuntu-focal"),":"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal\n\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Check docker is installed."),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run hello-world\n")))),(0,r.kt)("p",null," If added successfully, it will output as follows:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ sudo docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. 
The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')),(0,r.kt)("ol",{start:7},(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Add permissions to use Docker commands without the ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo")," keyword by executing the following commands:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo groupadd docker\nsudo usermod -aG docker $USER\nnewgrp docker\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"To verify that you can now use Docker commands without ",(0,r.kt)("inlineCode",{parentName:"p"},"sudo"),", run the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run")," command again:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run hello-world\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message after executing the command, it means that the permissions have been successfully added:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'mlops@ubuntu:~$ docker run hello-world\n\nHello from Docker!\nThis message shows that your installation appears to be working correctly.\n\nTo generate this message, Docker took the following steps:\n1. The Docker client contacted the Docker daemon.\n2. The Docker daemon pulled the "hello-world" image from the Docker Hub.\n (amd64)\n3. The Docker daemon created a new container from that image which runs the\n executable that produces the output you are currently reading.\n4. The Docker daemon streamed that output to the Docker client, which sent it\n to your terminal.\n\nTo try something more ambitious, you can run an Ubuntu container with:\n$ docker run -it ubuntu bash\n\nShare images, automate workflows, and more with a free Docker ID:\nhttps://hub.docker.com/\n\nFor more examples and ideas, visit:\nhttps://docs.docker.com/get-started/\n')))),(0,r.kt)("h2",{id:"turn-off-swap-memory"},"Turn off Swap Memory"),(0,r.kt)("p",null,"In order for kubelet to work properly, ",(0,r.kt)("strong",{parentName:"p"},"cluster")," nodes must turn off the virtual memory called swap. The following command turns off the swap.",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("strong",{parentName:"p"},"(When using cluster and client on the same desktop, turning off swap memory may result in a slowdown in speed)")),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo sed -i '/ swap / s/^\\(.*\\)$/#\\1/g' /etc/fstab\nsudo swapoff -a\n")),(0,r.kt)("h2",{id:"install-kubectl"},"Install Kubectl"),(0,r.kt)("p",null,"kubectl is a client tool used to make API requests to a Kubernetes cluster. 
It needs to be installed on the client node."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Download kubectl version v1.21.7 to the current folder:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Change the file permissions and move it to the appropriate location to make kubectl executable:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl\n"))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Verify that kubectl is installed correctly:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl version --client\n")),(0,r.kt)("p",{parentName:"li"},"If you see the following message, it means that kubectl is installed successfully:"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}\n'))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, you can refer to the following resources:"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://dev.to/aabiseverywhere/configuring-multiple-kubeconfig-on-your-machine-59eo"},"Configuring Multiple kubeconfig on Your Machine")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://github.com/ahmetb/kubectx"},"kubectx - Switch between Kubernetes contexts easily"))))),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/install/ubuntu/"},"Install Docker Engine on Ubuntu")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/"},"Install and Set Up kubectl on Linux"))))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/ab4ab49e.65fea98d.js b/en/assets/js/ab4ab49e.4410b0a8.js similarity index 99% rename from en/assets/js/ab4ab49e.65fea98d.js rename to en/assets/js/ab4ab49e.4410b0a8.js index 6fd1f751..b7ab3bec 100644 --- a/en/assets/js/ab4ab49e.65fea98d.js +++ b/en/assets/js/ab4ab49e.4410b0a8.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9259],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>h});var o=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function l(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=o.createContext({}),d=function(e){var n=o.useContext(s),t=n;return 
e&&(t="function"==typeof e?e(n):l(l({},n),e)),t},m=function(e){var n=d(e.components);return o.createElement(s.Provider,{value:n},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},c=o.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),p=d(t),c=a,h=p["".concat(s,".").concat(c)]||p[c]||u[c]||r;return t?o.createElement(h,l(l({ref:n},m),{},{components:t})):o.createElement(h,l({ref:n},m))}));function h(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,l=new Array(r);l[0]=c;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:a,l[1]=i;for(var d=2;d{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>i,toc:()=>d});var o=t(7462),a=(t(7294),t(3905));const r={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"version-1.0/api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-fields.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. Seldon Monitoring",permalink:"/en/docs/1.0/api-deployment/seldon-pg"},next:{title:"5. 
Model from MLflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow"}},s={},d=[{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:d},p="wrapper";function u(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,o.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Summary of how Seldon Core creates an API server:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"initContainer downloads the required model from the model repository."),(0,a.kt)("li",{parentName:"ol"},"The downloaded model is passed to the container."),(0,a.kt)("li",{parentName:"ol"},"The container runs an API server enclosing the model."),(0,a.kt)("li",{parentName:"ol"},"The API can be requested at the generated API server address to receive the inference values from the model.")),(0,a.kt)("p",null,"The yaml file defining the custom resource, SeldonDeployment, which is most commonly used when using Seldon Core is as follows:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," fields of SeldonDeployment are required fields. ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect. ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," must be a single array consisting of ",(0,a.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph")," defined. 
Here also, ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect."),(0,a.kt)("p",null,"Now let's take a look at the fields that need to be defined in ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph"),"."),(0,a.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," must be a single array consisting of the ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," key. The ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," must have the fields ",(0,a.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"initContainers")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"containers")," defined."),(0,a.kt)("h3",{id:"volumes"},"volumes"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"Volumes")," refer to the space used to store the models downloaded from the initContainer, which is received as an array with the components ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"emptyDir"),". These values are used only once when downloading and moving the models, so they do not need to be modified significantly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"args")," field contains the system arguments necessary to download the model from the model repository and move it to the specified model path. It provides the required parameters for the initContainer to perform the downloading and storage operations."),(0,a.kt)("p",null,"initContainer is responsible for downloading the model to be used from the API, so the fields used determine the information needed to download data from the model registry. "),(0,a.kt)("p",null,"The value of initContainer consists of n arrays, and each model needs to be specified separately."),(0,a.kt)("h4",{id:"name"},"name"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"name")," is the name of the pod in Kubernetes, and it is recommended to use ",(0,a.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," for debugging. 
"),(0,a.kt)("h4",{id:"image"},"image"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"image")," is the name of the image used to download the model, and there are two recommended images by"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,a.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,a.kt)("p",null,"For more detailed information, please refer to the following resources:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,a.kt)("p",null,"In MLOps for ALL, we use kfserving for downloading and storing models."),(0,a.kt)("h4",{id:"args"},"args"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,a.kt)("p",null,"When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image is run (",(0,a.kt)("inlineCode",{parentName:"p"},"run"),"), it takes an argument in the form of an array. The first array value is the address of the model to be downloaded. The second array value is the address where the downloaded model will be stored (Seldon Core usually stores it in ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models"),")."),(0,a.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"volumeMounts")," is a field that attaches volumes to the Kubernetes to share ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models")," as described in volumes. For more information, refer to Kubernetes Volume ",(0,a.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"Kubernetes Volume"),'."'),(0,a.kt)("h3",{id:"container"},"container"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null," Container defines the fields that determine the configuration when the model is run in an API form."),(0,a.kt)("h4",{id:"name-1"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the pod in Kubernetes. It should be the name of the model being used."),(0,a.kt)("h4",{id:"image-1"},"image"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"image")," field represents the image used to convert the model into an API. 
The image should have all the necessary packages installed when the model is loaded."),(0,a.kt)("p",null,"Seldon Core provides official images for different types of models, including:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,a.kt)("p",null,"You can choose the appropriate image based on the type of model you are using."),(0,a.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,a.kt)("p",null,"This is a field that tells the path where the data downloaded from initContainer is located. Here, to prevent the model from being modified, ",(0,a.kt)("inlineCode",{parentName:"p"},"readOnly: true")," will also be given."),(0,a.kt)("h4",{id:"securitycontext"},"securityContext"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null,"When installing necessary packages, pod may not be able to perform the package installation due to lack of permission. To address this, root permission is granted (although this could cause security issues when in actual service)."),(0,a.kt)("h2",{id:"graph"},"graph"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"This is a field that defines the order in which the model operates."),(0,a.kt)("h3",{id:"name-2"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the model graph. It should match the name defined in the container."),(0,a.kt)("h3",{id:"type"},"type"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"type")," field can have four different values:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"MODEL"),(0,a.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"ROUTER")),(0,a.kt)("p",null,"For detailed explanations of each type, you can refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"."),(0,a.kt)("h3",{id:"parameters"},"parameters"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"parameters")," field contains values used in the class init. 
For the sklearnserver, you can find the required values in the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"following file"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,a.kt)("p",null,"If you look at the code, you can define ",(0,a.kt)("inlineCode",{parentName:"p"},"model_uri")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"method"),"."),(0,a.kt)("h3",{id:"children"},"children"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"children")," field is used when creating the sequence diagram. More details about this field will be explained on the following page."))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9259],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>h});var o=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);n&&(o=o.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,o)}return t}function l(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=o.createContext({}),d=function(e){var n=o.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):l(l({},n),e)),t},m=function(e){var n=d(e.components);return o.createElement(s.Provider,{value:n},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return o.createElement(o.Fragment,{},n)}},c=o.forwardRef((function(e,n){var t=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),p=d(t),c=a,h=p["".concat(s,".").concat(c)]||p[c]||u[c]||r;return t?o.createElement(h,l(l({ref:n},m),{},{components:t})):o.createElement(h,l({ref:n},m))}));function h(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var r=t.length,l=new Array(r);l[0]=c;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:a,l[1]=i;for(var d=2;d{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>r,metadata:()=>i,toc:()=>d});var o=t(7462),a=(t(7294),t(3905));const r={title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},l=void 0,i={unversionedId:"api-deployment/seldon-fields",id:"version-1.0/api-deployment/seldon-fields",title:"4. Seldon Fields",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-fields.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-fields.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Seldon Fields",description:"",sidebar_position:4,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"3. 
Seldon Monitoring",permalink:"/en/docs/1.0/api-deployment/seldon-pg"},next:{title:"5. Model from MLflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow"}},s={},d=[{value:"componentSpecs",id:"componentspecs",level:2},{value:"volumes",id:"volumes",level:3},{value:"name",id:"name",level:4},{value:"image",id:"image",level:4},{value:"args",id:"args",level:4},{value:"volumeMounts",id:"volumemounts",level:3},{value:"container",id:"container",level:3},{value:"name",id:"name-1",level:4},{value:"image",id:"image-1",level:4},{value:"volumeMounts",id:"volumemounts-1",level:4},{value:"securityContext",id:"securitycontext",level:4},{value:"graph",id:"graph",level:2},{value:"name",id:"name-2",level:3},{value:"type",id:"type",level:3},{value:"parameters",id:"parameters",level:3},{value:"children",id:"children",level:3}],m={toc:d},p="wrapper";function u(e){let{components:n,...t}=e;return(0,a.kt)(p,(0,o.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Summary of how Seldon Core creates an API server:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"initContainer downloads the required model from the model repository."),(0,a.kt)("li",{parentName:"ol"},"The downloaded model is passed to the container."),(0,a.kt)("li",{parentName:"ol"},"The container runs an API server enclosing the model."),(0,a.kt)("li",{parentName:"ol"},"The API can be requested at the generated API server address to receive the inference values from the model.")),(0,a.kt)("p",null,"The yaml file defining the custom resource, SeldonDeployment, which is most commonly used when using Seldon Core is as follows:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n\n containers:\n - name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," fields of SeldonDeployment are required fields. ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect. ",(0,a.kt)("inlineCode",{parentName:"p"},"predictors")," must be a single array consisting of ",(0,a.kt)("inlineCode",{parentName:"p"},"name"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph")," defined. 
Here also, ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," is mainly used as a name to differentiate pods in Kubernetes and does not have a major effect."),(0,a.kt)("p",null,"Now let's take a look at the fields that need to be defined in ",(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"graph"),"."),(0,a.kt)("h2",{id:"componentspecs"},"componentSpecs"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"componentSpecs")," must be a single array consisting of the ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," key. The ",(0,a.kt)("inlineCode",{parentName:"p"},"spec")," must have the fields ",(0,a.kt)("inlineCode",{parentName:"p"},"volumes"),", ",(0,a.kt)("inlineCode",{parentName:"p"},"initContainers")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"containers")," defined."),(0,a.kt)("h3",{id:"volumes"},"volumes"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumes:\n- name: model-provision-location\n emptyDir: {}\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"Volumes")," refer to the space used to store the models downloaded from the initContainer, which is received as an array with the components ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"emptyDir"),". These values are used only once when downloading and moving the models, so they do not need to be modified significantly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'- name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n')),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"args")," field contains the system arguments necessary to download the model from the model repository and move it to the specified model path. It provides the required parameters for the initContainer to perform the downloading and storage operations."),(0,a.kt)("p",null,"initContainer is responsible for downloading the model to be used from the API, so the fields used determine the information needed to download data from the model registry. "),(0,a.kt)("p",null,"The value of initContainer consists of n arrays, and each model needs to be specified separately."),(0,a.kt)("h4",{id:"name"},"name"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"name")," is the name of the pod in Kubernetes, and it is recommended to use ",(0,a.kt)("inlineCode",{parentName:"p"},"{model_name}-initializer")," for debugging. 
"),(0,a.kt)("h4",{id:"image"},"image"),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"image")," is the name of the image used to download the model, and there are two recommended images by"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"gcr.io/kfserving/storage-initializer:v0.4.0"),(0,a.kt)("li",{parentName:"ul"},"seldonio/rclone-storage-initializer:1.13.0-dev")),(0,a.kt)("p",null,"For more detailed information, please refer to the following resources:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/servers/kfserving-storage-initializer.html"},"kfserving")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core/tree/master/components/rclone-storage-initializer"},"rclone"))),(0,a.kt)("p",null,"In MLOps for ALL, we use kfserving for downloading and storing models."),(0,a.kt)("h4",{id:"args"},"args"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'args:\n - "gs://seldon-models/v1.12.0-dev/sklearn/iris"\n - "/mnt/models"\n')),(0,a.kt)("p",null,"When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image is run (",(0,a.kt)("inlineCode",{parentName:"p"},"run"),"), it takes an argument in the form of an array. The first array value is the address of the model to be downloaded. The second array value is the address where the downloaded model will be stored (Seldon Core usually stores it in ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models"),")."),(0,a.kt)("h3",{id:"volumemounts"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n")),(0,a.kt)("p",null,(0,a.kt)("inlineCode",{parentName:"p"},"volumeMounts")," is a field that attaches volumes to the Kubernetes to share ",(0,a.kt)("inlineCode",{parentName:"p"},"/mnt/models")," as described in volumes. For more information, refer to Kubernetes Volume ",(0,a.kt)("a",{parentName:"p",href:"https://kubernetes.io/docs/concepts/storage/volumes/"},"Kubernetes Volume"),'."'),(0,a.kt)("h3",{id:"container"},"container"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"containers:\n- name: model\n image: seldonio/sklearnserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null," Container defines the fields that determine the configuration when the model is run in an API form."),(0,a.kt)("h4",{id:"name-1"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the pod in Kubernetes. It should be the name of the model being used."),(0,a.kt)("h4",{id:"image-1"},"image"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"image")," field represents the image used to convert the model into an API. 
The image should have all the necessary packages installed when the model is loaded."),(0,a.kt)("p",null,"Seldon Core provides official images for different types of models, including:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"seldonio/sklearnserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/mlflowserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/xgboostserver"),(0,a.kt)("li",{parentName:"ul"},"seldonio/tfserving")),(0,a.kt)("p",null,"You can choose the appropriate image based on the type of model you are using."),(0,a.kt)("h4",{id:"volumemounts-1"},"volumeMounts"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"volumeMounts:\n- mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n")),(0,a.kt)("p",null,"This is a field that tells the path where the data downloaded from initContainer is located. Here, to prevent the model from being modified, ",(0,a.kt)("inlineCode",{parentName:"p"},"readOnly: true")," will also be given."),(0,a.kt)("h4",{id:"securitycontext"},"securityContext"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n")),(0,a.kt)("p",null,"When installing necessary packages, pod may not be able to perform the package installation due to lack of permission. To address this, root permission is granted (although this could cause security issues when in actual service)."),(0,a.kt)("h2",{id:"graph"},"graph"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"This is a field that defines the order in which the model operates."),(0,a.kt)("h3",{id:"name-2"},"name"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"name")," field refers to the name of the model graph. It should match the name defined in the container."),(0,a.kt)("h3",{id:"type"},"type"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"type")," field can have four different values:"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"MODEL"),(0,a.kt)("li",{parentName:"ol"},"OUTPUT_TRANSFORMER"),(0,a.kt)("li",{parentName:"ol"},"ROUTER")),(0,a.kt)("p",null,"For detailed explanations of each type, you can refer to the ",(0,a.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/graph-metadata.html"},"Seldon Core Complex Graphs Metadata Example"),"."),(0,a.kt)("h3",{id:"parameters"},"parameters"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"parameters")," field contains values used in the class init. 
For the sklearnserver, you can find the required values in the ",(0,a.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core/blob/master/servers/sklearnserver/sklearnserver/SKLearnServer.py"},"following file"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'class SKLearnServer(SeldonComponent):\n def __init__(self, model_uri: str = None, method: str = "predict_proba"):\n')),(0,a.kt)("p",null,"If you look at the code, you can define ",(0,a.kt)("inlineCode",{parentName:"p"},"model_uri")," and ",(0,a.kt)("inlineCode",{parentName:"p"},"method"),"."),(0,a.kt)("h3",{id:"children"},"children"),(0,a.kt)("p",null,"The ",(0,a.kt)("inlineCode",{parentName:"p"},"children")," field is used when creating the sequence diagram. More details about this field will be explained on the following page."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/af806db3.1f3ad8b3.js b/en/assets/js/af806db3.a5fdaba0.js similarity index 98% rename from en/assets/js/af806db3.1f3ad8b3.js rename to en/assets/js/af806db3.a5fdaba0.js index 71d4eaf4..9dc32570 100644 --- a/en/assets/js/af806db3.1f3ad8b3.js +++ b/en/assets/js/af806db3.a5fdaba0.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9010],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},c=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),m=u(n),c=a,h=m["".concat(s,".").concat(c)]||m[c]||d[c]||o;return n?r.createElement(h,l(l({ref:t},p),{},{components:n})):r.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=c;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},l=void 0,i={unversionedId:"setup-kubernetes/intro",id:"setup-kubernetes/intro",title:"1. 
Introduction",description:"Setup Introduction",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/en/docs/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/en/docs/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes"}},s={},u=[{value:"Build MLOps System",id:"build-mlops-system",level:2},{value:"Components",id:"components",level:2},{value:"Cluster",id:"cluster",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"Client",id:"client",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],p={toc:u},m="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(m,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"build-mlops-system"},"Build MLOps System"),(0,a.kt)("p",null,"The biggest barrier when studying MLOps is the difficulty of setting up and using an MLOps system. Using public cloud platforms like AWS or GCP, or commercial tools like Weights & Biases or neptune.ai, can be costly, and starting from scratch to build the entire environment can be overwhelming and confusing."),(0,a.kt)("p",null,"To address these challenges and help those who haven't been able to start with MLOps, ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will guide you on how to build and use an MLOps system from scratch, requiring only a desktop with Ubuntu installed."),(0,a.kt)("p",null,"For those who cannot prepare a Ubuntu desktop environment, use virtual machines to set up the environment."),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"If you are using Windows or an Intel-based Mac for the ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," practical exercises, you can prepare an Ubuntu desktop environment using virtual machine software such as VirtualBox or VMware. Please make sure to meet the recommended specifications when creating the virtual machine.\nHowever, for those using an M1 Mac, as of the date of writing (February 2022), VirtualBox and VMware are not available. (",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"Check if macOS apps are optimized for M1 Apple Silicon Mac"),")\nTherefore, if you are not using a cloud environment, you can install UTM, Virtual machines for Mac, to use virtual machines.\n(Purchasing and downloading software from the App Store is a form of donation-based payment. 
The free version is sufficient as it only differs in automatic updates.)\nThis virtual machine software supports the ",(0,a.kt)("em",{parentName:"p"},"Ubuntu 20.04.3 LTS")," practice operating system, enabling you to perform the exercises on an M1 Mac.")),(0,a.kt)("p",null,"However, since it is not possible to use all the elements described in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"Components of MLOps"),", ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will mainly focus on installing the representative open source software and connecting them to each other."),(0,a.kt)("p",null,"It is not meant that installing open source software in ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," is a standard, and we recommend choosing the appropriate tool that fits your situation."),(0,a.kt)("h2",{id:"components"},"Components"),(0,a.kt)("p",null,"The components of the MLOps system that we will make in this article and each version have been verified in the following environment."),(0,a.kt)("p",null,"To facilitate smooth testing, I will explain the setup of the ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,a.kt)("strong",{parentName:"p"},"Client")," as separate entities."),(0,a.kt)("p",null,"The ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," refers to a single desktop with Ubuntu installed.",(0,a.kt)("br",{parentName:"p"}),"\n","The ",(0,a.kt)("strong",{parentName:"p"},"Client")," is recommended to be a different desktop, such as a laptop or another desktop with access to the Cluster or Kubernetes installation. However, if you only have one machine available, you can use the same desktop for both Cluster and Client purposes."),(0,a.kt)("h3",{id:"cluster"},"Cluster"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"Below is the list of software that needs to be installed on the Cluster:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"Below is the list of third-party software that needs to be installed using Helm:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"client"},"Client"),(0,a.kt)("p",null,"The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU: 6 cores"),(0,a.kt)("li",{parentName:"ul"},"RAM: 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK: 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU: NVIDIA GPU (optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9010],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},c=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),m=u(n),c=a,h=m["".concat(s,".").concat(c)]||m[c]||d[c]||o;return n?r.createElement(h,l(l({ref:t},p),{},{components:n})):r.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=c;var i={};for(var s in 
t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},l=void 0,i={unversionedId:"setup-kubernetes/intro",id:"setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/en/docs/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/en/docs/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/en/docs/setup-kubernetes/kubernetes"}},s={},u=[{value:"Build MLOps System",id:"build-mlops-system",level:2},{value:"Components",id:"components",level:2},{value:"Cluster",id:"cluster",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"Client",id:"client",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],p={toc:u},m="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(m,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"build-mlops-system"},"Build MLOps System"),(0,a.kt)("p",null,"The biggest barrier when studying MLOps is the difficulty of setting up and using an MLOps system. Using public cloud platforms like AWS or GCP, or commercial tools like Weights & Biases or neptune.ai, can be costly, and starting from scratch to build the entire environment can be overwhelming and confusing."),(0,a.kt)("p",null,"To address these challenges and help those who haven't been able to start with MLOps, ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will guide you on how to build and use an MLOps system from scratch, requiring only a desktop with Ubuntu installed."),(0,a.kt)("p",null,"For those who cannot prepare a Ubuntu desktop environment, use virtual machines to set up the environment."),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"If you are using Windows or an Intel-based Mac for the ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," practical exercises, you can prepare an Ubuntu desktop environment using virtual machine software such as VirtualBox or VMware. Please make sure to meet the recommended specifications when creating the virtual machine.\nHowever, for those using an M1 Mac, as of the date of writing (February 2022), VirtualBox and VMware are not available. 
(",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"Check if macOS apps are optimized for M1 Apple Silicon Mac"),")\nTherefore, if you are not using a cloud environment, you can install UTM, Virtual machines for Mac, to use virtual machines.\n(Purchasing and downloading software from the App Store is a form of donation-based payment. The free version is sufficient as it only differs in automatic updates.)\nThis virtual machine software supports the ",(0,a.kt)("em",{parentName:"p"},"Ubuntu 20.04.3 LTS")," practice operating system, enabling you to perform the exercises on an M1 Mac.")),(0,a.kt)("p",null,"However, since it is not possible to use all the elements described in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"Components of MLOps"),", ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will mainly focus on installing the representative open source software and connecting them to each other."),(0,a.kt)("p",null,"It is not meant that installing open source software in ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," is a standard, and we recommend choosing the appropriate tool that fits your situation."),(0,a.kt)("h2",{id:"components"},"Components"),(0,a.kt)("p",null,"The components of the MLOps system that we will make in this article and each version have been verified in the following environment."),(0,a.kt)("p",null,"To facilitate smooth testing, I will explain the setup of the ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,a.kt)("strong",{parentName:"p"},"Client")," as separate entities."),(0,a.kt)("p",null,"The ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," refers to a single desktop with Ubuntu installed.",(0,a.kt)("br",{parentName:"p"}),"\n","The ",(0,a.kt)("strong",{parentName:"p"},"Client")," is recommended to be a different desktop, such as a laptop or another desktop with access to the Cluster or Kubernetes installation. However, if you only have one machine available, you can use the same desktop for both Cluster and Client purposes."),(0,a.kt)("h3",{id:"cluster"},"Cluster"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"Below is the list of software that needs to be installed on the Cluster:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"Below is the list of third-party software that needs to be installed using Helm:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"client"},"Client"),(0,a.kt)("p",null,"The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU: 6 cores"),(0,a.kt)("li",{parentName:"ul"},"RAM: 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK: 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU: NVIDIA GPU (optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/afecfb43.c0aade7a.js b/en/assets/js/afecfb43.7710f425.js similarity index 99% rename from en/assets/js/afecfb43.c0aade7a.js rename to en/assets/js/afecfb43.7710f425.js index 33fd84dc..48dbd582 100644 --- a/en/assets/js/afecfb43.c0aade7a.js +++ b/en/assets/js/afecfb43.7710f425.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7628],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=l.createContext({}),m=function(e){var n=l.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=m(e.components);return l.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var 
t=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),d=m(t),u=a,f=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[d]="string"==typeof e?e:a,r[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"version-1.0/api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/en/docs/1.0/api-deployment/seldon-children"}},s={},m=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API Creation",id:"api-creation",level:2}],p={toc:m},d="wrapper";function c(e){let{components:n,...o}=e;return(0,a.kt)(d,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"On this page, we will learn how to create an API using a model saved in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-mlflow"},"MLflow Component"),"."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"The initContainer needs credentials to access minio and download the model. The credentials for access to minio are as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,"The input value for ",(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," is ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),". However, since the input value for the secret must be an encoded value, the value that is actually entered must be the value that comes out after performing the following. 
"),(0,a.kt)("p",null,"The values that need to be entered in data are as follows."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"The encoding can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"Then the following values will be output."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"If you do the encoding for the entire value, it will look like this:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000="},"http://minio-service.kubeflow.svc:9000=")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false=")),(0,a.kt)("p",null,"You can generate a yaml file through the following command to create the secret."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"Create the secret through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"If performed normally, it will be output as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"Now let's write the yaml file to create Seldon Core."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"There are two major changes compared to the previously created 
",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-fields"},"Seldon Fields"),":"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"The ",(0,a.kt)("inlineCode",{parentName:"li"},"envFrom")," field is added to the initContainer."),(0,a.kt)("li",{parentName:"ol"},"The address in the args has been changed to ",(0,a.kt)("inlineCode",{parentName:"li"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"),".")),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?"),(0,a.kt)("p",null,"To find the path, go back to MLflow and click on the run, then click on the model, as shown below:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(9484).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"You can use the path obtained from there."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"This process involves providing the environment variables required to access MinIO and download the model. We will use the ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret")," created earlier."),(0,a.kt)("h2",{id:"api-creation"},"API Creation"),(0,a.kt)("p",null,"First, let's generate the YAML file based on the specification defined above."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"Create a seldon pod."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"If it is performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"Now we wait until the pod is up and running properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"If it is outputted similarly to the following, the API has been created normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"You can confirm the execution through the following request on the API created through the 
CLI."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"If executed normally, you can get the following results."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}c.isMDXComponent=!0},9484:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7628],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=l.createContext({}),m=function(e){var n=l.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=m(e.components);return l.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),d=m(t),u=a,f=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[d]="string"==typeof e?e:a,r[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"version-1.0/api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-mlflow.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. 
Seldon Fields",permalink:"/en/docs/1.0/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/en/docs/1.0/api-deployment/seldon-children"}},s={},m=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API Creation",id:"api-creation",level:2}],p={toc:m},d="wrapper";function c(e){let{components:n,...o}=e;return(0,a.kt)(d,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"On this page, we will learn how to create an API using a model saved in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/advanced-mlflow"},"MLflow Component"),"."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"The initContainer needs credentials to access minio and download the model. The credentials for access to minio are as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,"The input value for ",(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," is ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),". However, since the input value for the secret must be an encoded value, the value that is actually entered must be the value that comes out after performing the following. "),(0,a.kt)("p",null,"The values that need to be entered in data are as follows."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"The encoding can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"Then the following values will be output."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"If you do the encoding for the entire value, it will look like this:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000="},"http://minio-service.kubeflow.svc:9000=")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false=")),(0,a.kt)("p",null,"You can generate a yaml file through the following command to create the secret."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"Create the secret through 
the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"If performed normally, it will be output as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"Now let's write the yaml file to create Seldon Core."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"There are two major changes compared to the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/api-deployment/seldon-fields"},"Seldon Fields"),":"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"The ",(0,a.kt)("inlineCode",{parentName:"li"},"envFrom")," field is added to the initContainer."),(0,a.kt)("li",{parentName:"ol"},"The address in the args has been changed to ",(0,a.kt)("inlineCode",{parentName:"li"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"),".")),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?"),(0,a.kt)("p",null,"To find the path, go back to MLflow and click on the run, then click on the model, as shown below:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(9484).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"You can use the path obtained from there."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"This process involves providing the environment variables required to access MinIO and download the model. 
We will use the ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret")," created earlier."),(0,a.kt)("h2",{id:"api-creation"},"API Creation"),(0,a.kt)("p",null,"First, let's generate the YAML file based on the specification defined above."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"Create a seldon pod."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"If it is performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"Now we wait until the pod is up and running properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"If it is outputted similarly to the following, the API has been created normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"You can confirm the execution through the following request on the API created through the CLI."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"If executed normally, you can get the following results."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}c.isMDXComponent=!0},9484:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file diff --git a/en/assets/js/b0207dc0.8ddf0b5a.js b/en/assets/js/b0207dc0.8698c977.js similarity index 98% rename from en/assets/js/b0207dc0.8ddf0b5a.js rename to en/assets/js/b0207dc0.8698c977.js index 820b2b1f..0675157b 100644 --- a/en/assets/js/b0207dc0.8ddf0b5a.js +++ 
b/en/assets/js/b0207dc0.8698c977.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6297],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),u=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},m=function(e){var t=u(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),c=u(n),d=a,k=c["".concat(p,".").concat(d)]||c[d]||s[d]||o;return n?r.createElement(k,l(l({ref:t},m),{},{components:n})):r.createElement(k,l({ref:t},m))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>s,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"How to Contribute",sidebar_position:2},l=void 0,i={unversionedId:"how-to-contribute",id:"how-to-contribute",title:"How to Contribute",description:"How to Start",source:"@site/community/how-to-contribute.md",sourceDirName:".",slug:"/how-to-contribute",permalink:"/en/community/how-to-contribute",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/how-to-contribute.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"How to Contribute",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Community",permalink:"/en/community/community"},next:{title:"Contributors",permalink:"/en/community/contributors"}},p={},u=[{value:"How to Start",id:"how-to-start",level:2},{value:"Git Repo \uc900\ube44",id:"git-repo-\uc900\ube44",level:3},{value:"\ud658\uacbd \uc124\uc815",id:"\ud658\uacbd-\uc124\uc815",level:3},{value:"How to Contribute",id:"how-to-contribute",level:2},{value:"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c",id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c",level:3},{value:"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c",id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c",level:3},{value:"3. 
\ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c",id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c",level:3},{value:"After Pull Request",id:"after-pull-request",level:2}],m={toc:u},c="wrapper";function s(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"how-to-start"},"How to Start"),(0,a.kt)("h3",{id:"git-repo-\uc900\ube44"},"Git Repo \uc900\ube44"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io"},(0,a.kt)("em",{parentName:"a"},"\ubaa8\ub450\uc758 MLOps")," GitHub Repository"),"\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc5ec\ub7ec\ubd84\uc758 \uac1c\uc778 Repository\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Fork"),"\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Forked Repository\ub97c \uc5ec\ub7ec\ubd84\uc758 \uc791\uc5c5 \ud658\uacbd\uc73c\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"git clone"),"\ud569\ub2c8\ub2e4."))),(0,a.kt)("h3",{id:"\ud658\uacbd-\uc124\uc815"},"\ud658\uacbd \uc124\uc815"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ubaa8\ub450\uc758 MLOps\ub294 Hugo \uc640 Node\ub97c \uc774\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"li"}),"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"node & npm"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm --version\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"hugo"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hugo version\n")))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud544\uc694\ud55c node module\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm install\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uac01 \uae00\uc758 \uc77c\uad00\uc131\uc744 \uc704\ud574\uc11c \uc5ec\ub7ec markdown lint\ub97c \uc801\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 test\ub97c \uc9c4\ud589\ud55c \ud6c4 \ucee4\ubc0b\ud569\ub2c8\ub2e4.\ub0b4\uc6a9 \uc218\uc815 \ubc0f \ucd94\uac00 \ud6c4 lint\uac00 \ub9de\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm test\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"lint \ud655\uc778 \uc644\ub8cc \ud6c4 ci \ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm ci\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ub85c\uceec\uc5d0\uc11c \uc2e4\ud589 \ud6c4 \uc218\uc815\ud55c \uae00\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub098\uc624\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm run 
start\n")))),(0,a.kt)("h2",{id:"how-to-contribute"},"How to Contribute"),(0,a.kt)("h3",{id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c"},"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c"),(0,a.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub294 \uac01 \ucc55\ud130\uc640 \ud3ec\uc2a4\ud2b8\uc758 \uc704\uce58\uc5d0 \ub9de\ub294 weight\ub97c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Introduction: 1xx"),(0,a.kt)("li",{parentName:"ul"},"Setup: 2xx"),(0,a.kt)("li",{parentName:"ul"},"Kubeflow: 3xx"),(0,a.kt)("li",{parentName:"ul"},"API Deployment: 4xx"),(0,a.kt)("li",{parentName:"ul"},"Help: 10xx")),(0,a.kt)("h3",{id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c"},"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c"),(0,a.kt)("p",null,"\uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c Contributor\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'contributors: ["John Doe", "Adam Smith"]\n')),(0,a.kt)("h3",{id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c"},"3. \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c"),(0,a.kt)("p",null,"\ub9cc\uc57d \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec \ud560 \ub54c ",(0,a.kt)("inlineCode",{parentName:"p"},"content/kor/contributors"),"\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc73c\ub85c \ud3f4\ub354\ub97c \uc0dd\uc131\ud55c \ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ub77c\ub294 \ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo kim"),"\uc774 \ubcf8\uc778\uc758 \uc601\uc5b4 \uc774\ub984\uc774\ub77c\uba74, \ud3f4\ub354\uba85\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo-kim"),"\uc73c\ub85c \ud558\uc5ec \ud574\ub2f9 \ud3f4\ub354 \ub0b4\ubd80\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ud30c\uc77c\uc5d0 \ub2e4\uc74c\uc758 \ub0b4\uc6a9\uc744 \uc791\uc131\ud569\ub2c8\ub2e4.\n\ud3f4\ub354\uba85\uc740 \ud558\uc774\ud508(-)\uc73c\ub85c \uc5f0\uacb0\ud55c \uc18c\ubb38\uc790\ub85c, title\uc740 \ub744\uc5b4\uc4f0\uae30\ub97c \ud3ec\ud568\ud55c CamelCase\ub85c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'---\ntitle: "John Doe"\ndraft: false\n---\n')),(0,a.kt)("h2",{id:"after-pull-request"},"After Pull Request"),(0,a.kt)("p",null,"Pull Request\ub97c \uc0dd\uc131\ud558\uba74 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uc790\ub3d9\uc73c\ub85c ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc6b4\uc601\uc9c4\uc5d0\uac8c \ub9ac\ubdf0 \uc694\uccad\uc774 \uc804\ud574\uc9d1\ub2c8\ub2e4. 
\ucd5c\ub300 \uc77c\uc8fc\uc77c \uc774\ub0b4\ub85c \ud655\uc778 \ud6c4 Comment\ub97c \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4."))}s.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6297],{3905:(e,t,n)=>{n.d(t,{Zo:()=>m,kt:()=>k});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var p=r.createContext({}),u=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},m=function(e){var t=u(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",s={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,p=e.parentName,m=i(e,["components","mdxType","originalType","parentName"]),c=u(n),d=a,k=c["".concat(p,".").concat(d)]||c[d]||s[d]||o;return n?r.createElement(k,l(l({ref:t},m),{},{components:n})):r.createElement(k,l({ref:t},m))}));function k(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=d;var i={};for(var p in t)hasOwnProperty.call(t,p)&&(i[p]=t[p]);i.originalType=e,i[c]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>s,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"How to Contribute",sidebar_position:2},l=void 0,i={unversionedId:"how-to-contribute",id:"how-to-contribute",title:"How to Contribute",description:"How to Start",source:"@site/community/how-to-contribute.md",sourceDirName:".",slug:"/how-to-contribute",permalink:"/en/community/how-to-contribute",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/how-to-contribute.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"How to Contribute",sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Community",permalink:"/en/community/community"},next:{title:"Contributors",permalink:"/en/community/contributors"}},p={},u=[{value:"How to Start",id:"how-to-start",level:2},{value:"Git Repo \uc900\ube44",id:"git-repo-\uc900\ube44",level:3},{value:"\ud658\uacbd \uc124\uc815",id:"\ud658\uacbd-\uc124\uc815",level:3},{value:"How to Contribute",id:"how-to-contribute",level:2},{value:"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c",id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c",level:3},{value:"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c",id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c",level:3},{value:"3. 
\ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c",id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c",level:3},{value:"After Pull Request",id:"after-pull-request",level:2}],m={toc:u},c="wrapper";function s(e){let{components:t,...n}=e;return(0,a.kt)(c,(0,r.Z)({},m,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"how-to-start"},"How to Start"),(0,a.kt)("h3",{id:"git-repo-\uc900\ube44"},"Git Repo \uc900\ube44"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io"},(0,a.kt)("em",{parentName:"a"},"\ubaa8\ub450\uc758 MLOps")," GitHub Repository"),"\uc5d0 \uc811\uc18d\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\uc5ec\ub7ec\ubd84\uc758 \uac1c\uc778 Repository\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"Fork"),"\ud569\ub2c8\ub2e4.")),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Forked Repository\ub97c \uc5ec\ub7ec\ubd84\uc758 \uc791\uc5c5 \ud658\uacbd\uc73c\ub85c ",(0,a.kt)("inlineCode",{parentName:"p"},"git clone"),"\ud569\ub2c8\ub2e4."))),(0,a.kt)("h3",{id:"\ud658\uacbd-\uc124\uc815"},"\ud658\uacbd \uc124\uc815"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"\ubaa8\ub450\uc758 MLOps\ub294 Hugo \uc640 Node\ub97c \uc774\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"li"}),"\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \ud1b5\ud574 \ud544\uc694\ud55c \ud328\ud0a4\uc9c0\uac00 \uc124\uce58\ub418\uc5b4 \uc788\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4.")),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"node & npm"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm --version\n"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},"hugo"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"hugo version\n")))),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud544\uc694\ud55c node module\uc744 \uc124\uce58\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm install\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uac01 \uae00\uc758 \uc77c\uad00\uc131\uc744 \uc704\ud574\uc11c \uc5ec\ub7ec markdown lint\ub97c \uc801\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4.",(0,a.kt)("br",{parentName:"p"}),"\n","\ub2e4\uc74c \uba85\ub839\uc5b4\ub97c \uc2e4\ud589\ud574 test\ub97c \uc9c4\ud589\ud55c \ud6c4 \ucee4\ubc0b\ud569\ub2c8\ub2e4.\ub0b4\uc6a9 \uc218\uc815 \ubc0f \ucd94\uac00 \ud6c4 lint\uac00 \ub9de\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm test\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"lint \ud655\uc778 \uc644\ub8cc \ud6c4 ci \ub97c \uc2e4\ud589\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm ci\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"\ub85c\uceec\uc5d0\uc11c \uc2e4\ud589 \ud6c4 \uc218\uc815\ud55c \uae00\uc774 \uc815\uc0c1\uc801\uc73c\ub85c \ub098\uc624\ub294\uc9c0 \ud655\uc778\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"npm run 
start\n")))),(0,a.kt)("h2",{id:"how-to-contribute"},"How to Contribute"),(0,a.kt)("h3",{id:"1-\uc0c8\ub85c\uc6b4-\ud3ec\uc2a4\ud2b8\ub97c-\uc791\uc131\ud560-\ub54c"},"1. \uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub97c \uc791\uc131\ud560 \ub54c"),(0,a.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub294 \uac01 \ucc55\ud130\uc640 \ud3ec\uc2a4\ud2b8\uc758 \uc704\uce58\uc5d0 \ub9de\ub294 weight\ub97c \uc124\uc815\ud569\ub2c8\ub2e4."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Introduction: 1xx"),(0,a.kt)("li",{parentName:"ul"},"Setup: 2xx"),(0,a.kt)("li",{parentName:"ul"},"Kubeflow: 3xx"),(0,a.kt)("li",{parentName:"ul"},"API Deployment: 4xx"),(0,a.kt)("li",{parentName:"ul"},"Help: 10xx")),(0,a.kt)("h3",{id:"2-\uae30\uc874\uc758-\ud3ec\uc2a4\ud2b8\ub97c-\uc218\uc815\ud560-\ub54c"},"2. \uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c"),(0,a.kt)("p",null,"\uae30\uc874\uc758 \ud3ec\uc2a4\ud2b8\ub97c \uc218\uc815\ud560 \ub54c Contributor\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc744 \uc785\ub825\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'contributors: ["John Doe", "Adam Smith"]\n')),(0,a.kt)("h3",{id:"3-\ud504\ub85c\uc81d\ud2b8\uc5d0-\ucc98\uc74c-\uae30\uc5ec\ud560-\ub54c"},"3. \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec\ud560 \ub54c"),(0,a.kt)("p",null,"\ub9cc\uc57d \ud504\ub85c\uc81d\ud2b8\uc5d0 \ucc98\uc74c \uae30\uc5ec \ud560 \ub54c ",(0,a.kt)("inlineCode",{parentName:"p"},"content/kor/contributors"),"\uc5d0 \ubcf8\uc778\uc758 \uc774\ub984\uc73c\ub85c \ud3f4\ub354\ub97c \uc0dd\uc131\ud55c \ud6c4, ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ub77c\ub294 \ud30c\uc77c\uc744 \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("p",null,"\uc608\ub97c \ub4e4\uc5b4, ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo kim"),"\uc774 \ubcf8\uc778\uc758 \uc601\uc5b4 \uc774\ub984\uc774\ub77c\uba74, \ud3f4\ub354\uba85\uc740 ",(0,a.kt)("inlineCode",{parentName:"p"},"minsoo-kim"),"\uc73c\ub85c \ud558\uc5ec \ud574\ub2f9 \ud3f4\ub354 \ub0b4\ubd80\uc758 ",(0,a.kt)("inlineCode",{parentName:"p"},"_index.md"),"\ud30c\uc77c\uc5d0 \ub2e4\uc74c\uc758 \ub0b4\uc6a9\uc744 \uc791\uc131\ud569\ub2c8\ub2e4.\n\ud3f4\ub354\uba85\uc740 \ud558\uc774\ud508(-)\uc73c\ub85c \uc5f0\uacb0\ud55c \uc18c\ubb38\uc790\ub85c, title\uc740 \ub744\uc5b4\uc4f0\uae30\ub97c \ud3ec\ud568\ud55c CamelCase\ub85c \uc791\uc131\ud569\ub2c8\ub2e4."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-markdown"},'---\ntitle: "John Doe"\ndraft: false\n---\n')),(0,a.kt)("h2",{id:"after-pull-request"},"After Pull Request"),(0,a.kt)("p",null,"Pull Request\ub97c \uc0dd\uc131\ud558\uba74 \ud504\ub85c\uc81d\ud2b8\uc5d0\uc11c\ub294 \uc790\ub3d9\uc73c\ub85c ",(0,a.kt)("em",{parentName:"p"},"\ubaa8\ub450\uc758 MLOps")," \uc6b4\uc601\uc9c4\uc5d0\uac8c \ub9ac\ubdf0 \uc694\uccad\uc774 \uc804\ud574\uc9d1\ub2c8\ub2e4. 
\ucd5c\ub300 \uc77c\uc8fc\uc77c \uc774\ub0b4\ub85c \ud655\uc778 \ud6c4 Comment\ub97c \ub4dc\ub9b4 \uc608\uc815\uc785\ub2c8\ub2e4."))}s.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/b108acf9.81c43433.js b/en/assets/js/b108acf9.05ffdcef.js similarity index 97% rename from en/assets/js/b108acf9.81c43433.js rename to en/assets/js/b108acf9.05ffdcef.js index d10977df..901a8b9e 100644 --- a/en/assets/js/b108acf9.81c43433.js +++ b/en/assets/js/b108acf9.05ffdcef.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5696],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(r),f=o,b=p["".concat(l,".").concat(f)]||p[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},c),{},{components:r})):n.createElement(b,i({ref:t},c))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline Relates",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/en/docs/kubeflow-dashboard-guide/experiments"},next:{title:"1. 
Kubeflow Introduction",permalink:"/en/docs/kubeflow/kubeflow-intro"}},l={},u=[],c={toc:u},p="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"In the left tabs of the Central Dashboard (KFP Experiments, Pipelines, Runs, Recurring Runs, Artifacts, Executions) you can manage Kubeflow Pipelines and the results of Pipeline execution and Pipeline Runs."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipelines are the main reason for using Kubeflow in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", and details on how to create, execute, and check the results of Kubeflow Pipelines can be found in ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"."))}d.isMDXComponent=!0},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5696],{3905:(e,t,r)=>{r.d(t,{Zo:()=>c,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,c=s(e,["components","mdxType","originalType","parentName"]),p=u(r),f=o,b=p["".concat(l,".").concat(f)]||p[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},c),{},{components:r})):n.createElement(b,i({ref:t},c))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"kubeflow-dashboard-guide/experiments-and-others",title:"6. 
Kubeflow Pipeline Relates",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/en/docs/kubeflow-dashboard-guide/experiments"},next:{title:"1. Kubeflow Introduction",permalink:"/en/docs/kubeflow/kubeflow-intro"}},l={},u=[],c={toc:u},p="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"In the left tabs of the Central Dashboard (KFP Experiments, Pipelines, Runs, Recurring Runs, Artifacts, Executions) you can manage Kubeflow Pipelines and the results of Pipeline execution and Pipeline Runs."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipelines are the main reason for using Kubeflow in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", and details on how to create, execute, and check the results of Kubeflow Pipelines can be found in ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"."))}d.isMDXComponent=!0},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/b3d231d1.e9ff2ae9.js b/en/assets/js/b3d231d1.40acd90d.js similarity index 99% rename from en/assets/js/b3d231d1.e9ff2ae9.js rename to en/assets/js/b3d231d1.40acd90d.js index 7d83567a..99147e89 100644 --- a/en/assets/js/b3d231d1.e9ff2ae9.js +++ b/en/assets/js/b3d231d1.40acd90d.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5988],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},d=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||u[m]||o;return n?a.createElement(k,i(i({ref:t},d),{},{components:n})):a.createElement(k,i({ref:t},d))}));function k(e,t){var 
n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:r,i[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/advanced",id:"prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/en/docs/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/advanced.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/en/docs/prerequisites/docker/images"}},s={},c=[{value:"Making a good Docker image",id:"making-a-good-docker-image",level:2},{value:"Considerations to make Docker image:",id:"considerations-to-make-docker-image",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Naming docker tag",id:"naming-docker-tag",level:3},{value:"ETC",id:"etc",level:3},{value:"Several options for docker run",id:"several-options-for-docker-run",level:2},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"Docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"Running docker run as a background process",id:"running-docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],d={toc:c},p="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(p,(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"making-a-good-docker-image"},"Making a good Docker image"),(0,r.kt)("h3",{id:"considerations-to-make-docker-image"},"Considerations to make Docker image:"),(0,r.kt)("p",null,"When creating a Docker image using a Dockerfile, the ",(0,r.kt)("strong",{parentName:"p"},"order")," of the commands is important.",(0,r.kt)("br",{parentName:"p"}),"\n","This is because Docker images are composed of many Read-Only layers and when building the image, existing layers are ",(0,r.kt)("strong",{parentName:"p"},"cached")," and reused, so if you structure your Dockerfile with this in mind, you can ",(0,r.kt)("strong",{parentName:"p"},"reduce the build time"),"."),(0,r.kt)("p",null,"Each of the ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY")," commands in a Dockerfile are stored as one 
layer."),(0,r.kt)("p",null,"For example, if we have the following ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,r.kt)("p",null,"If you run the image built with the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash"),", it can be represented in the following layers. "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"layers.png",src:n(3966).Z,width:"1080",height:"612"})),(0,r.kt)("p",null,"The topmost R/W layer does not affect the image. In other words, any changes made inside the container are volatile."),(0,r.kt)("p",null,"When a lower layer is changed, all the layers above it need to be rebuilt. Therefore, the order of Dockerfile instructions is important. It is recommended to place the parts that are frequently changed towards the end. (e.g., ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,r.kt)("p",null,"Conversely, parts that are unlikely to change should be placed towards the beginning."),(0,r.kt)("p",null,"If there are parts that are rarely changed but used in multiple places, they can be consolidated. It is advisable to create a separate image for those common parts in advance and use it as a base image."),(0,r.kt)("p",null,"For example, if you want to create separate images for an environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-cpu")," and another environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-gpu"),", you can do the following:\nCreate a base image ",(0,r.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,r.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," that includes Python and other basic packages installed. Then, when creating the images with the CPU and GPU versions of TensorFlow, you can use the base image as the ",(0,r.kt)("inlineCode",{parentName:"p"},"FROM")," instruction and write the separate instructions for installing TensorFlow in each Dockerfile. Managing two Dockerfiles in this way improves readability and reduces build time."),(0,r.kt)("p",null,"Combining layers had performance benefits in older versions of Docker. However, since you cannot guarantee the Docker version in which your Docker containers will run, it is recommended to combine layers for readability purposes. 
It is best to combine layers that can be combined appropriately."),(0,r.kt)("p",null,"Here is an example of a Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,r.kt)("p",null,"This can be written by combining it as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,r.kt)("p",null,"For convenience, it is better to use ",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore"),".",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore")," is similar to ",(0,r.kt)("inlineCode",{parentName:"p"},".gitignore")," in the sense that it can be excluded when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," just like when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"git add"),". "),(0,r.kt)("p",null,"More information can be found in the ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker Official Documentation"),"."),(0,r.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are both used when you want to execute a command at the runtime of the container. One of them must be present in the Dockerfile."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Difference"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD"),": Easily modifiable when running ",(0,r.kt)("inlineCode",{parentName:"li"},"docker run")," command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": Requires the use of ",(0,r.kt)("inlineCode",{parentName:"li"},"--entrypoint")," to modify")))),(0,r.kt)("p",null,"When ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are used together, ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," typically represents the arguments (parameters) for the command specified in ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT"),"."),(0,r.kt)("p",null,"For example, consider the following Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,r.kt)("p",null,"If you build and run the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the parts marked as comments deactivated, you can get the following results: 
"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null}),(0,r.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"NO CMD")),(0,r.kt)("td",{parentName:"tr",align:null},"Error!"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,r.kt)("td",{parentName:"tr",align:null},"x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD x y")),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"In Kubernetes pod, ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," corresponds to the command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD")," corresponds to the arguments")))),(0,r.kt)("h3",{id:"naming-docker-tag"},"Naming docker tag"),(0,r.kt)("p",null,'Recommend not using "latest" as a tag for a Docker image, as it is the default tag name and can be easily overwritten unintentionally.'),(0,r.kt)("p",null,"It is important to ensure uniqueness of one image with one tag for the sake of collaboration and debugging in the production stage.",(0,r.kt)("br",{parentName:"p"}),"\n","Using the same tag for different contents can lead to dangling images, which are not shown in the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," but still take up storage space."),(0,r.kt)("h3",{id:"etc"},"ETC"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Logs and other information are stored separately from the container, not inside it.\nThis is because data written from within the container can be lost at any time."),(0,r.kt)("li",{parentName:"ol"},"Secrets and environment-dependent information should not be written directly into the Dockerfile but should be passed in via environment variables or a .env config file."),(0,r.kt)("li",{parentName:"ol"},"There is a ",(0,r.kt)("strong",{parentName:"li"},"linter")," for Dockerfiles, so it is useful to use it when collaborating.\n",(0,r.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,r.kt)("h2",{id:"several-options-for-docker-run"},"Several options for docker run"),(0,r.kt)("p",null,"When using Docker containers, there are some inconveniences.\nSpecifically, Docker does not store any of the work done within the Docker container by default.\nThis is because Docker containers use isolated file systems. 
Therefore, it is difficult to share data between multiple Docker containers."),(0,r.kt)("p",null,"To solve this problem, there are two approaches offered by Docker."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"storage.png",src:n(3681).Z,width:"501",height:"255"})),(0,r.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Use the Docker CLI to directly manage a resource called ",(0,r.kt)("inlineCode",{parentName:"li"},"volume"),"."),(0,r.kt)("li",{parentName:"ul"},"Create a specific directory under the Docker area (",(0,r.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") on the host and mount that path to a Docker container.")),(0,r.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Mount a specific path on the host to a Docker container.")),(0,r.kt)("h4",{id:"how-to-use"},"How to use?"),(0,r.kt)("p",null,"The usage is through the same interface, using the ",(0,r.kt)("inlineCode",{parentName:"p"},"-v")," option.",(0,r.kt)("br",{parentName:"p"}),"\n","However, when using volumes, you need to manage them directly by performing commands like ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume rm"),", etc."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Docker volume"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Blind mount"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,r.kt)("p",null,"When developing locally, bind mount can be convenient, but if you want to maintain a clean environment, using Docker volume and explicitly performing create and rm operations can be another approach."),(0,r.kt)("p",null,"The way storage is provided in Kubernetes ultimately relies on Docker's bind mount as well."),(0,r.kt)("h3",{id:"docker-run-with-resource-limit"},"Docker run with resource limit"),(0,r.kt)("p",null,"Basically, docker containers can ",(0,r.kt)("strong",{parentName:"p"},"fully utilize the CPU and memory resources of the host OS"),". 
However, when using this, depending on the resource situation of the host OS, docker containers may abnormally terminate due to issues such as ",(0,r.kt)("strong",{parentName:"p"},"OOM"),".\nTo address this problem, docker provides the ",(0,r.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"option")," which allows you to ",(0,r.kt)("strong",{parentName:"p"},"limit the usage of CPU and memory")," when running the docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,r.kt)("p",null,"After running the Docker above, you can check the usage through the 'docker stats' command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,r.kt)("p",null,"In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique."),(0,r.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,r.kt)("p",null,"If there is a need to keep a particular container running continuously, the ",(0,r.kt)("inlineCode",{parentName:"p"},"--restart=always")," option is provided to try to re-create the container immediately after it is terminated."),(0,r.kt)("p",null,"After entering the option, run the docker."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,r.kt)("p",null,"Run ",(0,r.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps")," to check if it is restarting.\nIf it is running normally, ",(0,r.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," will be printed in STATUS."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Provides options such as "on-failure with max retries" and "always"')))),(0,r.kt)("p",null,"When specifying the restart option for a job resource in Kubernetes, this approach is used."),(0,r.kt)("h3",{id:"running-docker-run-as-a-background-process"},"Running docker run as a background process"),(0,r.kt)("p",null,"By default, when running a Docker container, it is executed as a foreground process. This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands."),(0,r.kt)("p",null,"Let's try an example. 
Open two terminals, and in one terminal, continuously monitor ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", while in the other terminal, execute the following commands one by one and observe the behavior."),(0,r.kt)("h4",{id:"first-practice"},"First Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"You must remain stopped for 10 seconds and you cannot perform any other commands from that container. After 10 seconds, you can check in docker ps that the container has terminated."),(0,r.kt)("h4",{id:"second-practice"},"Second Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"After that, press ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + q"),"."),(0,r.kt)("p",null,"Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),'. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the ',(0,r.kt)("inlineCode",{parentName:"p"},"run")," command."),(0,r.kt)("h4",{id:"third-practice"},"Third Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,r.kt)("p",null,"In detached mode, you can perform other actions in the terminal that executed the command."),(0,r.kt)("p",null,"It is good to use detached mode appropriately according to the situation.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, when developing a backend API server that communicates with the DB, the backend API server needs to be constantly checked with hot-loading while changing the source code, but the DB does not need to be monitored, so it can be executed as follows.",(0,r.kt)("br",{parentName:"p"}),"\n","Run the DB container in detached mode, and run the backend API server in attached mode to follow the logs."),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}u.isMDXComponent=!0},3966:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},3681:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file +"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5988],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},d=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||u[m]||o;return n?a.createElement(k,i(i({ref:t},d),{},{components:n})):a.createElement(k,i({ref:t},d))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:r,i[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/advanced",id:"prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/en/docs/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/prerequisites/docker/advanced.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/en/docs/prerequisites/docker/images"}},s={},c=[{value:"Making a good Docker image",id:"making-a-good-docker-image",level:2},{value:"Considerations to make Docker image:",id:"considerations-to-make-docker-image",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Naming docker tag",id:"naming-docker-tag",level:3},{value:"ETC",id:"etc",level:3},{value:"Several options for docker run",id:"several-options-for-docker-run",level:2},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"Docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"Running docker 
run as a background process",id:"running-docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],d={toc:c},p="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(p,(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"making-a-good-docker-image"},"Making a good Docker image"),(0,r.kt)("h3",{id:"considerations-to-make-docker-image"},"Considerations to make Docker image:"),(0,r.kt)("p",null,"When creating a Docker image using a Dockerfile, the ",(0,r.kt)("strong",{parentName:"p"},"order")," of the commands is important.",(0,r.kt)("br",{parentName:"p"}),"\n","This is because Docker images are composed of many Read-Only layers and when building the image, existing layers are ",(0,r.kt)("strong",{parentName:"p"},"cached")," and reused, so if you structure your Dockerfile with this in mind, you can ",(0,r.kt)("strong",{parentName:"p"},"reduce the build time"),"."),(0,r.kt)("p",null,"Each of the ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY")," commands in a Dockerfile are stored as one layer."),(0,r.kt)("p",null,"For example, if we have the following ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,r.kt)("p",null,"If you run the image built with the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash"),", it can be represented in the following layers. "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"layers.png",src:n(3966).Z,width:"1080",height:"612"})),(0,r.kt)("p",null,"The topmost R/W layer does not affect the image. In other words, any changes made inside the container are volatile."),(0,r.kt)("p",null,"When a lower layer is changed, all the layers above it need to be rebuilt. Therefore, the order of Dockerfile instructions is important. It is recommended to place the parts that are frequently changed towards the end. (e.g., ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,r.kt)("p",null,"Conversely, parts that are unlikely to change should be placed towards the beginning."),(0,r.kt)("p",null,"If there are parts that are rarely changed but used in multiple places, they can be consolidated. It is advisable to create a separate image for those common parts in advance and use it as a base image."),(0,r.kt)("p",null,"For example, if you want to create separate images for an environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-cpu")," and another environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-gpu"),", you can do the following:\nCreate a base image ",(0,r.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,r.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," that includes Python and other basic packages installed. 
Then, when creating the images with the CPU and GPU versions of TensorFlow, you can use the base image as the ",(0,r.kt)("inlineCode",{parentName:"p"},"FROM")," instruction and write the separate instructions for installing TensorFlow in each Dockerfile. Managing two Dockerfiles in this way improves readability and reduces build time."),(0,r.kt)("p",null,"Combining layers had performance benefits in older versions of Docker. However, since you cannot guarantee the Docker version in which your Docker containers will run, it is recommended to combine layers for readability purposes. It is best to combine layers that can be combined appropriately."),(0,r.kt)("p",null,"Here is an example of a Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,r.kt)("p",null,"This can be written by combining it as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,r.kt)("p",null,"For convenience, it is better to use ",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore"),".",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore")," is similar to ",(0,r.kt)("inlineCode",{parentName:"p"},".gitignore")," in the sense that it can be excluded when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," just like when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"git add"),". "),(0,r.kt)("p",null,"More information can be found in the ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker Official Documentation"),"."),(0,r.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are both used when you want to execute a command at the runtime of the container. 
One of them must be present in the Dockerfile."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Difference"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD"),": Easily modifiable when running ",(0,r.kt)("inlineCode",{parentName:"li"},"docker run")," command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": Requires the use of ",(0,r.kt)("inlineCode",{parentName:"li"},"--entrypoint")," to modify")))),(0,r.kt)("p",null,"When ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are used together, ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," typically represents the arguments (parameters) for the command specified in ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT"),"."),(0,r.kt)("p",null,"For example, consider the following Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,r.kt)("p",null,"If you build and run the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the parts marked as comments deactivated, you can get the following results: "),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null}),(0,r.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"NO CMD")),(0,r.kt)("td",{parentName:"tr",align:null},"Error!"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,r.kt)("td",{parentName:"tr",align:null},"x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD x y")),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"In Kubernetes pod, ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," corresponds to the command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD")," corresponds to the arguments")))),(0,r.kt)("h3",{id:"naming-docker-tag"},"Naming docker 
tag"),(0,r.kt)("p",null,'Recommend not using "latest" as a tag for a Docker image, as it is the default tag name and can be easily overwritten unintentionally.'),(0,r.kt)("p",null,"It is important to ensure uniqueness of one image with one tag for the sake of collaboration and debugging in the production stage.",(0,r.kt)("br",{parentName:"p"}),"\n","Using the same tag for different contents can lead to dangling images, which are not shown in the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," but still take up storage space."),(0,r.kt)("h3",{id:"etc"},"ETC"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Logs and other information are stored separately from the container, not inside it.\nThis is because data written from within the container can be lost at any time."),(0,r.kt)("li",{parentName:"ol"},"Secrets and environment-dependent information should not be written directly into the Dockerfile but should be passed in via environment variables or a .env config file."),(0,r.kt)("li",{parentName:"ol"},"There is a ",(0,r.kt)("strong",{parentName:"li"},"linter")," for Dockerfiles, so it is useful to use it when collaborating.\n",(0,r.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,r.kt)("h2",{id:"several-options-for-docker-run"},"Several options for docker run"),(0,r.kt)("p",null,"When using Docker containers, there are some inconveniences.\nSpecifically, Docker does not store any of the work done within the Docker container by default.\nThis is because Docker containers use isolated file systems. Therefore, it is difficult to share data between multiple Docker containers."),(0,r.kt)("p",null,"To solve this problem, there are two approaches offered by Docker."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"storage.png",src:n(3681).Z,width:"501",height:"255"})),(0,r.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Use the Docker CLI to directly manage a resource called ",(0,r.kt)("inlineCode",{parentName:"li"},"volume"),"."),(0,r.kt)("li",{parentName:"ul"},"Create a specific directory under the Docker area (",(0,r.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") on the host and mount that path to a Docker container.")),(0,r.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Mount a specific path on the host to a Docker container.")),(0,r.kt)("h4",{id:"how-to-use"},"How to use?"),(0,r.kt)("p",null,"The usage is through the same interface, using the ",(0,r.kt)("inlineCode",{parentName:"p"},"-v")," option.",(0,r.kt)("br",{parentName:"p"}),"\n","However, when using volumes, you need to manage them directly by performing commands like ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume rm"),", etc."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Docker volume"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Blind mount"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,r.kt)("p",null,"When developing locally, bind mount can be convenient, but if you want to maintain a 
clean environment, using Docker volume and explicitly performing create and rm operations can be another approach."),(0,r.kt)("p",null,"The way storage is provided in Kubernetes ultimately relies on Docker's bind mount as well."),(0,r.kt)("h3",{id:"docker-run-with-resource-limit"},"Docker run with resource limit"),(0,r.kt)("p",null,"Basically, docker containers can ",(0,r.kt)("strong",{parentName:"p"},"fully utilize the CPU and memory resources of the host OS"),". However, when using this, depending on the resource situation of the host OS, docker containers may abnormally terminate due to issues such as ",(0,r.kt)("strong",{parentName:"p"},"OOM"),".\nTo address this problem, docker provides the ",(0,r.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"option")," which allows you to ",(0,r.kt)("strong",{parentName:"p"},"limit the usage of CPU and memory")," when running the docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,r.kt)("p",null,"After running the Docker above, you can check the usage through the 'docker stats' command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,r.kt)("p",null,"In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique."),(0,r.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,r.kt)("p",null,"If there is a need to keep a particular container running continuously, the ",(0,r.kt)("inlineCode",{parentName:"p"},"--restart=always")," option is provided to try to re-create the container immediately after it is terminated."),(0,r.kt)("p",null,"After entering the option, run the docker."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,r.kt)("p",null,"Run ",(0,r.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps")," to check if it is restarting.\nIf it is running normally, ",(0,r.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," will be printed in STATUS."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Provides options such as "on-failure with max retries" and "always"')))),(0,r.kt)("p",null,"When specifying the restart option for a job resource in Kubernetes, this approach is used."),(0,r.kt)("h3",{id:"running-docker-run-as-a-background-process"},"Running docker run as a background process"),(0,r.kt)("p",null,"By default, when running a Docker container, it is executed as a foreground process. 
This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands."),(0,r.kt)("p",null,"Let's try an example. Open two terminals, and in one terminal, continuously monitor ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", while in the other terminal, execute the following commands one by one and observe the behavior."),(0,r.kt)("h4",{id:"first-practice"},"First Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"You must remain stopped for 10 seconds and you cannot perform any other commands from that container. After 10 seconds, you can check in docker ps that the container has terminated."),(0,r.kt)("h4",{id:"second-practice"},"Second Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"After that, press ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + q"),"."),(0,r.kt)("p",null,"Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),'. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the ',(0,r.kt)("inlineCode",{parentName:"p"},"run")," command."),(0,r.kt)("h4",{id:"third-practice"},"Third Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,r.kt)("p",null,"In detached mode, you can perform other actions in the terminal that executed the command."),(0,r.kt)("p",null,"It is good to use detached mode appropriately according to the situation.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, when developing a backend API server that communicates with the DB, the backend API server needs to be constantly checked with hot-loading while changing the source code, but the DB does not need to be monitored, so it can be executed as follows.",(0,r.kt)("br",{parentName:"p"}),"\n","Run the DB container in detached mode, and run the backend API server in attached mode to follow the logs."),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}u.isMDXComponent=!0},3966:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},3681:(e,t,n)=>{n.d(t,{Z:()=>a});const 
a=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file diff --git a/en/assets/js/b54de702.659327f3.js b/en/assets/js/b54de702.bbc4c61c.js similarity index 99% rename from en/assets/js/b54de702.659327f3.js rename to en/assets/js/b54de702.bbc4c61c.js index f15b8d5f..32ec9489 100644 --- a/en/assets/js/b54de702.659327f3.js +++ b/en/assets/js/b54de702.bbc4c61c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3856],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>m});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function s(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=i.createContext({}),o=function(e){var t=i.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},d=function(e){var t=o(e.components);return i.createElement(u.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},b=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,u=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=o(n),b=a,m=p["".concat(u,".").concat(b)]||p[b]||k[b]||r;return n?i.createElement(m,s(s({ref:t},d),{},{components:n})):i.createElement(m,s({ref:t},d))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=b;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:a,s[1]=l;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>o});var i=n(7462),a=(n(7294),n(3905));const r={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"4.2. Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. 
Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:3},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2}],d={toc:o},p="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,i.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,a.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,a.kt)("p",null,"Install the v1.24.0 version of the Minikube binary to use Minikube."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,a.kt)("p",null,"Check if it is installed properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,a.kt)("p",null,"If this message appears, it means the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,a.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,a.kt)("p",null,"Now let's build the Kubernetes cluster using Minikube.\nTo facilitate the smooth use of GPUs and communication between cluster and client, Minikube is run using the ",(0,a.kt)("inlineCode",{parentName:"p"},"driver=none")," option. Please note that this option must be run as root user. "),(0,a.kt)("p",null,"Switch to root user."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,a.kt)("p",null,"Run ",(0,a.kt)("inlineCode",{parentName:"p"},"minikube start")," to build the Kubernetes cluster for Kubeflow's smooth operation, specifying the Kubernetes version as v1.21.7 and adding ",(0,a.kt)("inlineCode",{parentName:"p"},"--extra-config"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,a.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,a.kt)("p",null,"When installing Minikube, there are default addons that are installed. 
We will disable any addons that we do not intend to use."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,a.kt)("p",null,"Confirm that all addons are disabled."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,a.kt)("p",null,"If the following message appears, it means that the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | disabled | kubernetes |\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,a.kt)("h3",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,a.kt)("p",null,"Now, let's install the necessary tools for smooth usage of Kubernetes on the ",(0,a.kt)("strong",{parentName:"p"},"client")," machine. 
If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are not separated, please note that you need to perform all the operations as the root user."),(0,a.kt)("p",null,"If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are separated, first, we need to retrieve the Kubernetes administrator credentials from the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," to the ",(0,a.kt)("strong",{parentName:"p"},"client"),"."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Check the config on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),":"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Cluster node\nminikube kubectl -- config view --flatten\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"The following information will be displayed:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,a.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,a.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"")))),(0,a.kt)("ol",{start:3},(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Create the ",(0,a.kt)("inlineCode",{parentName:"p"},".kube")," folder on the ",(0,a.kt)("strong",{parentName:"p"},"client")," node:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Client node\nmkdir -p /home/$USER/.kube\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Paste the information obtained from Step 2 into the file and save it:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,a.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. Install Kubernetes Default Modules"),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-verify-successful-installation"},"5. 
Verify Successful Installation"),(0,a.kt)("p",null,"Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"If this message appears, it means that the installation has completed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[3856],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>m});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function s(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var u=i.createContext({}),o=function(e){var t=i.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},d=function(e){var t=o(e.components);return i.createElement(u.Provider,{value:t},e.children)},p="mdxType",k={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},b=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,u=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=o(n),b=a,m=p["".concat(u,".").concat(b)]||p[b]||k[b]||r;return n?i.createElement(m,s(s({ref:t},d),{},{components:n})):i.createElement(m,s({ref:t},d))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,s=new Array(r);s[0]=b;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:a,s[1]=l;for(var o=2;o{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>s,default:()=>k,frontMatter:()=>r,metadata:()=>l,toc:()=>o});var i=n(7462),a=(n(7294),n(3905));const r={title:"4.2. Minikube",description:"",sidebar_position:2,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,l={unversionedId:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",id:"setup-kubernetes/install-kubernetes/kubernetes-with-minikube",title:"4.2. Minikube",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",sourceDirName:"setup-kubernetes/install-kubernetes",slug:"/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/setup-kubernetes/install-kubernetes/kubernetes-with-minikube.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"4.2. 
Minikube",description:"",sidebar_position:2,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4.3. Kubeadm",permalink:"/en/docs/setup-kubernetes/install-kubernetes/kubernetes-with-kubeadm"},next:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/setup-kubernetes/install-kubernetes-module"}},u={},o=[{value:"1. Prerequisite",id:"1-prerequisite",level:2},{value:"Minikube binary",id:"minikube-binary",level:3},{value:"2. Setup Kubernetes Cluster",id:"2-setup-kubernetes-cluster",level:2},{value:"Disable default addons",id:"disable-default-addons",level:3},{value:"3. Setup Kubernetes Client",id:"3-setup-kubernetes-client",level:3},{value:"4. Install Kubernetes Default Modules",id:"4-install-kubernetes-default-modules",level:2},{value:"5. Verify Successful Installation",id:"5-verify-successful-installation",level:2}],d={toc:o},p="wrapper";function k(e){let{components:t,...n}=e;return(0,a.kt)(p,(0,i.Z)({},d,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"1-prerequisite"},"1. Prerequisite"),(0,a.kt)("p",null,"Before setting up a Kubernetes cluster, install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),"."),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-prerequisite"},"Install Prerequisite")," to install the necessary components on the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," before installing Kubernetes."),(0,a.kt)("h3",{id:"minikube-binary"},"Minikube binary"),(0,a.kt)("p",null,"Install the v1.24.0 version of the Minikube binary to use Minikube."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"wget https://github.com/kubernetes/minikube/releases/download/v1.24.0/minikube-linux-amd64\nsudo install minikube-linux-amd64 /usr/local/bin/minikube\n")),(0,a.kt)("p",null,"Check if it is installed properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube version\n")),(0,a.kt)("p",null,"If this message appears, it means the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ minikube version\nminikube version: v1.24.0\ncommit: 76b94fb3c4e8ac5062daf70d60cf03ddcc0a741b\n")),(0,a.kt)("h2",{id:"2-setup-kubernetes-cluster"},"2. Setup Kubernetes Cluster"),(0,a.kt)("p",null,"Now let's build the Kubernetes cluster using Minikube.\nTo facilitate the smooth use of GPUs and communication between cluster and client, Minikube is run using the ",(0,a.kt)("inlineCode",{parentName:"p"},"driver=none")," option. Please note that this option must be run as root user. 
"),(0,a.kt)("p",null,"Switch to root user."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"sudo su\n")),(0,a.kt)("p",null,"Run ",(0,a.kt)("inlineCode",{parentName:"p"},"minikube start")," to build the Kubernetes cluster for Kubeflow's smooth operation, specifying the Kubernetes version as v1.21.7 and adding ",(0,a.kt)("inlineCode",{parentName:"p"},"--extra-config"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube start --driver=none \\\n --kubernetes-version=v1.21.7 \\\n --extra-config=apiserver.service-account-signing-key-file=/var/lib/minikube/certs/sa.key \\\n --extra-config=apiserver.service-account-issuer=kubernetes.default.svc\n")),(0,a.kt)("h3",{id:"disable-default-addons"},"Disable default addons"),(0,a.kt)("p",null,"When installing Minikube, there are default addons that are installed. We will disable any addons that we do not intend to use."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons disable storage-provisioner\nminikube addons disable default-storageclass\n")),(0,a.kt)("p",null,"Confirm that all addons are disabled."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"minikube addons list\n")),(0,a.kt)("p",null,"If the following message appears, it means that the installation was successful."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"root@ubuntu:/home/mlops# minikube addons list\n|-----------------------------|----------|--------------|-----------------------|\n| ADDON NAME | PROFILE | STATUS | MAINTAINER |\n|-----------------------------|----------|--------------|-----------------------|\n| ambassador | minikube | disabled | unknown (third-party) |\n| auto-pause | minikube | disabled | google |\n| csi-hostpath-driver | minikube | disabled | kubernetes |\n| dashboard | minikube | disabled | kubernetes |\n| default-storageclass | minikube | disabled | kubernetes |\n| efk | minikube | disabled | unknown (third-party) |\n| freshpod | minikube | disabled | google |\n| gcp-auth | minikube | disabled | google |\n| gvisor | minikube | disabled | google |\n| helm-tiller | minikube | disabled | unknown (third-party) |\n| ingress | minikube | disabled | unknown (third-party) |\n| ingress-dns | minikube | disabled | unknown (third-party) |\n| istio | minikube | disabled | unknown (third-party) |\n| istio-provisioner | minikube | disabled | unknown (third-party) |\n| kubevirt | minikube | disabled | unknown (third-party) |\n| logviewer | minikube | disabled | google |\n| metallb | minikube | disabled | unknown (third-party) |\n| metrics-server | minikube | disabled | kubernetes |\n| nvidia-driver-installer | minikube | disabled | google |\n| nvidia-gpu-device-plugin | minikube | disabled | unknown (third-party) |\n| olm | minikube | disabled | unknown (third-party) |\n| pod-security-policy | minikube | disabled | unknown (third-party) |\n| portainer | minikube | disabled | portainer.io |\n| registry | minikube | disabled | google |\n| registry-aliases | minikube | disabled | unknown (third-party) |\n| registry-creds | minikube | disabled | unknown (third-party) |\n| storage-provisioner | minikube | disabled | kubernetes |\n| storage-provisioner-gluster | minikube | disabled | unknown (third-party) |\n| volumesnapshots | minikube | disabled | kubernetes 
|\n|-----------------------------|----------|--------------|-----------------------|\n")),(0,a.kt)("h3",{id:"3-setup-kubernetes-client"},"3. Setup Kubernetes Client"),(0,a.kt)("p",null,"Now, let's install the necessary tools for smooth usage of Kubernetes on the ",(0,a.kt)("strong",{parentName:"p"},"client")," machine. If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are not separated, please note that you need to perform all the operations as the root user."),(0,a.kt)("p",null,"If the ",(0,a.kt)("strong",{parentName:"p"},"client")," and ",(0,a.kt)("strong",{parentName:"p"},"cluster")," nodes are separated, first, we need to retrieve the Kubernetes administrator credentials from the ",(0,a.kt)("strong",{parentName:"p"},"cluster")," to the ",(0,a.kt)("strong",{parentName:"p"},"client"),"."),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Check the config on the ",(0,a.kt)("strong",{parentName:"p"},"cluster"),":"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Cluster node\nminikube kubectl -- config view --flatten\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"The following information will be displayed:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\nclusters:\n")))),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"cluster:\ncertificate-authority-data: LS0tLS1CRUd....\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: cluster_info\nserver: https://192.168.0.62:8443\n"))," name: minikube\ncontexts:"),(0,a.kt)("li",{parentName:"ul"},"context:\ncluster: minikube\nextensions:",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"- extension:\n last-update: Mon, 06 Dec 2021 06:55:46 UTC\n provider: minikube.sigs.k8s.io\n version: v1.24.0\n name: context_info\nnamespace: default\nuser: minikube\n"))," name: minikube\ncurrent-context: minikube\nkind: Config\npreferences: {}\nusers:"),(0,a.kt)("li",{parentName:"ul"},"name: minikube\nuser:\nclient-certificate-data: LS0tLS1CRUdJTi....\nclient-key-data: LS0tLS1CRUdJTiBSU0....",(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre"},"")))),(0,a.kt)("ol",{start:3},(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Create the ",(0,a.kt)("inlineCode",{parentName:"p"},".kube")," folder on the ",(0,a.kt)("strong",{parentName:"p"},"client")," node:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"# Client node\nmkdir -p /home/$USER/.kube\n"))),(0,a.kt)("li",{parentName:"ol"},(0,a.kt)("p",{parentName:"li"},"Paste the information obtained from Step 2 into the file and save it:"),(0,a.kt)("pre",{parentName:"li"},(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"vi /home/$USER/.kube/config\n")))),(0,a.kt)("h2",{id:"4-install-kubernetes-default-modules"},"4. 
Install Kubernetes Default Modules"),(0,a.kt)("p",null,"Please refer to ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/install-kubernetes-module"},"Setup Kubernetes Modules")," to install the following components:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"helm"),(0,a.kt)("li",{parentName:"ul"},"kustomize"),(0,a.kt)("li",{parentName:"ul"},"CSI plugin"),(0,a.kt)("li",{parentName:"ul"},"[Optional]"," nvidia-docker, nvidia-device-plugin")),(0,a.kt)("h2",{id:"5-verify-successful-installation"},"5. Verify Successful Installation"),(0,a.kt)("p",null,"Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get nodes -o wide\n")),(0,a.kt)("p",null,"If this message appears, it means that the installation has completed normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"NAME STATUS ROLES AGE VERSION INTERNAL-IP EXTERNAL-IP OS-IMAGE KERNEL-VERSION CONTAINER-RUNTIME\nubuntu Ready control-plane,master 2d23h v1.21.7 192.168.0.75 Ubuntu 20.04.3 LTS 5.4.0-91-generic docker://20.10.11\n")))}k.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/bc53d220.5f9a90b4.js b/en/assets/js/bc53d220.69b1938b.js similarity index 97% rename from en/assets/js/bc53d220.5f9a90b4.js rename to en/assets/js/bc53d220.69b1938b.js index 9fbaae6b..f0cb27ee 100644 --- a/en/assets/js/bc53d220.5f9a90b4.js +++ b/en/assets/js/bc53d220.69b1938b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5134],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(n),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||i;return n?r.createElement(b,a(a({ref:t},p),{},{components:n})):r.createElement(b,a({ref:t},p))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,a[1]=s;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"version-1.0/kubeflow-dashboard-guide/experiments-and-others",title:"6. 
Kubeflow Pipeline Relates",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments"},next:{title:"1. Kubeflow Introduction",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,r.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"In the left tabs of the Central Dashboard (KFP Experiments, Pipelines, Runs, Recurring Runs, Artifacts, Executions) you can manage Kubeflow Pipelines and the results of Pipeline execution and Pipeline Runs."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:n(7173).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipelines are the main reason for using Kubeflow in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", and details on how to create, execute, and check the results of Kubeflow Pipelines can be found in ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"."))}d.isMDXComponent=!0},7173:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5134],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=r.createContext({}),u=function(e){var t=r.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(n),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||i;return n?r.createElement(b,a(a({ref:t},p),{},{components:n})):r.createElement(b,a({ref:t},p))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,a[1]=s;for(var 
u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>a,default:()=>d,frontMatter:()=>i,metadata:()=>s,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},a=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments-and-others",id:"version-1.0/kubeflow-dashboard-guide/experiments-and-others",title:"6. Kubeflow Pipeline Relates",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments-and-others",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments-and-others",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/experiments-and-others.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Kubeflow Pipeline Relates",description:"",sidebar_position:6,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Experiments(AutoML)",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments"},next:{title:"1. Kubeflow Introduction",permalink:"/en/docs/1.0/kubeflow/kubeflow-intro"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,r.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"In the left tabs of the Central Dashboard (KFP Experiments, Pipelines, Runs, Recurring Runs, Artifacts, Executions) you can manage Kubeflow Pipelines and the results of Pipeline execution and Pipeline Runs."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:n(7173).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,"Kubeflow Pipelines are the main reason for using Kubeflow in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", and details on how to create, execute, and check the results of Kubeflow Pipelines can be found in ",(0,o.kt)("a",{parentName:"p",href:"../kubeflow/kubeflow-intro"},"3.Kubeflow"),"."))}d.isMDXComponent=!0},7173:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/be794740.fb08cea8.js b/en/assets/js/be794740.3d9fa37e.js similarity index 99% rename from en/assets/js/be794740.fb08cea8.js rename to en/assets/js/be794740.3d9fa37e.js index 56e58bfe..8f6b4fe2 100644 --- a/en/assets/js/be794740.fb08cea8.js +++ b/en/assets/js/be794740.3d9fa37e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2513],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},p=function(e){var n=d(e.components);return 
a.createElement(i.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,s=e.originalType,i=e.parentName,p=r(e,["components","mdxType","originalType","parentName"]),c=d(t),m=l,g=c["".concat(i,".").concat(m)]||c[m]||u[m]||s;return t?a.createElement(g,o(o({ref:n},p),{},{components:t})):a.createElement(g,o({ref:n},p))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var s=t.length,o=new Array(s);o[0]=m;var r={};for(var i in n)hasOwnProperty.call(n,i)&&(r[i]=n[i]);r.originalType=e,r[c]="string"==typeof e?e:l,o[1]=r;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>s,metadata:()=>r,toc:()=>d});var a=t(7462),l=(t(7294),t(3905));const s={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},o=void 0,r={unversionedId:"api-deployment/seldon-iris",id:"version-1.0/api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/en/docs/1.0/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-iris.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/en/docs/1.0/api-deployment/seldon-pg"}},i={},d=[{value:"Deploy with SeldonDeployment",id:"deploy-with-seldondeployment",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:4},{value:"2. Define Spec",id:"2-define-spec",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Accessing Swagger",id:"1-accessing-swagger",level:3},{value:"2. Selecting Swagger Predictions",id:"2-selecting-swagger-predictions",level:3},{value:"3. Choosing Try it out",id:"3-choosing-try-it-out",level:3},{value:"4. Inputting data in the Request body",id:"4-inputting-data-in-the-request-body",level:3},{value:"5. Check the inference results",id:"5-check-the-inference-results",level:3},{value:"Using CLI",id:"using-cli",level:2}],p={toc:d},c="wrapper";function u(e){let{components:n,...s}=e;return(0,l.kt)(c,(0,a.Z)({},p,s,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"deploy-with-seldondeployment"},"Deploy with SeldonDeployment"),(0,l.kt)("p",null,"Let's deploy our trained model as an API using SeldonDeployment. SeldonDeployment is a custom resource definition (CRD) defined to deploy models as REST/gRPC servers on Kubernetes."),(0,l.kt)("h4",{id:"1-prerequisites"},"1. 
Prerequisites"),(0,l.kt)("p",null,"We will conduct the SeldonDeployment related practice in a new namespace called seldon-deploy. After creating the namespace, set seldon-deploy as the current namespace."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-define-spec"},"2. Define Spec"),(0,l.kt)("p",null,"Generate a yaml file to deploy SeldonDeployment.\nIn this page, we will use a publicly available iris model.\nBecause this iris model is trained through the sklearn framework, we use SKLEARN_SERVER."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"Deploy yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"Check if the deployment was successful through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"If everyone runs, similar results will be printed."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"Now, send a inference request to the deployed model to get the inference result. 
The API created by the SeldonDeployment follows the following rule:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-seldon"},"Since Seldon Core was installed with Ambassador as the Ingress Controller"),", all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway."),(0,l.kt)("p",null,"Therefore, first set the url of the Ambassador Ingress Gateway as an environment variable."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"Check the set url."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"It should be outputted similarly as follows, and if set through the cloud, you can check that internal IP address is set."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"This refers to the ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," where the SeldonDeployment is deployed and used to define the values defined in the metadata when defining the spec."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"In the example above, ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," is seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," is sklearn."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"In SeldonDeployment, the commonly used ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name")," has two options:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"The detailed usage of each method is explained below."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon."),(0,l.kt)("h3",{id:"1-accessing-swagger"},"1. Accessing Swagger"),(0,l.kt)("p",null,"According to the provided ingress URL rules, you can access the Swagger documentation using the following URL:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(2909).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-selecting-swagger-predictions"},"2. Selecting Swagger Predictions"),(0,l.kt)("p",null,"In the Swagger UI, select the ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," endpoint."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(1087).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-choosing-try-it-out"},"3. 
Choosing ",(0,l.kt)("em",{parentName:"h3"},"Try it out")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(5197).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-inputting-data-in-the-request-body"},"4. Inputting data in the Request body"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(6669).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"Enter the following data into the Request body."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-check-the-inference-results"},"5. Check the inference results"),(0,l.kt)("p",null,"You can click the ",(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," button to obtain the inference result."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(8503).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"If everything is executed successfully, you will obtain the following inference result."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"Also, you can use http client CLI tools such as curl to make API requests.\nFor example, requesting ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions")," as follows"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"You can confirm that the following response is outputted normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}u.isMDXComponent=!0},2909:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},1087:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},5197:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},6669:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},8503:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2513],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>g});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var i=a.createContext({}),d=function(e){var 
n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},m=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,s=e.originalType,i=e.parentName,p=r(e,["components","mdxType","originalType","parentName"]),c=d(t),m=l,g=c["".concat(i,".").concat(m)]||c[m]||u[m]||s;return t?a.createElement(g,o(o({ref:n},p),{},{components:t})):a.createElement(g,o({ref:n},p))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var s=t.length,o=new Array(s);o[0]=m;var r={};for(var i in n)hasOwnProperty.call(n,i)&&(r[i]=n[i]);r.originalType=e,r[c]="string"==typeof e?e:l,o[1]=r;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>o,default:()=>u,frontMatter:()=>s,metadata:()=>r,toc:()=>d});var a=t(7462),l=(t(7294),t(3905));const s={title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang","SeungTae Kim"]},o=void 0,r={unversionedId:"api-deployment/seldon-iris",id:"version-1.0/api-deployment/seldon-iris",title:"2. Deploy SeldonDeployment",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-iris.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-iris",permalink:"/en/docs/1.0/api-deployment/seldon-iris",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-iris.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Deploy SeldonDeployment",description:"",sidebar_position:2,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. What is API Deployment?",permalink:"/en/docs/1.0/api-deployment/what-is-api-deployment"},next:{title:"3. Seldon Monitoring",permalink:"/en/docs/1.0/api-deployment/seldon-pg"}},i={},d=[{value:"Deploy with SeldonDeployment",id:"deploy-with-seldondeployment",level:2},{value:"1. Prerequisites",id:"1-prerequisites",level:4},{value:"2. Define Spec",id:"2-define-spec",level:3},{value:"Ingress URL",id:"ingress-url",level:2},{value:"NODE_IP / NODE_PORT",id:"node_ip--node_port",level:3},{value:"namespace / seldon-deployment-name",id:"namespace--seldon-deployment-name",level:3},{value:"method-name",id:"method-name",level:3},{value:"Using Swagger",id:"using-swagger",level:2},{value:"1. Accessing Swagger",id:"1-accessing-swagger",level:3},{value:"2. Selecting Swagger Predictions",id:"2-selecting-swagger-predictions",level:3},{value:"3. Choosing Try it out",id:"3-choosing-try-it-out",level:3},{value:"4. Inputting data in the Request body",id:"4-inputting-data-in-the-request-body",level:3},{value:"5. Check the inference results",id:"5-check-the-inference-results",level:3},{value:"Using CLI",id:"using-cli",level:2}],p={toc:d},c="wrapper";function u(e){let{components:n,...s}=e;return(0,l.kt)(c,(0,a.Z)({},p,s,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"deploy-with-seldondeployment"},"Deploy with SeldonDeployment"),(0,l.kt)("p",null,"Let's deploy our trained model as an API using SeldonDeployment. 
SeldonDeployment is a custom resource definition (CRD) defined to deploy models as REST/gRPC servers on Kubernetes."),(0,l.kt)("h4",{id:"1-prerequisites"},"1. Prerequisites"),(0,l.kt)("p",null,"We will conduct the SeldonDeployment related practice in a new namespace called seldon-deploy. After creating the namespace, set seldon-deploy as the current namespace."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create namespace seldon-deploy\nkubectl config set-context --current --namespace=seldon-deploy\n")),(0,l.kt)("h3",{id:"2-define-spec"},"2. Define Spec"),(0,l.kt)("p",null,"Generate a yaml file to deploy SeldonDeployment.\nIn this page, we will use a publicly available iris model.\nBecause this iris model is trained through the sklearn framework, we use SKLEARN_SERVER."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"cat < iris-sdep.yaml\napiVersion: machinelearning.seldon.io/v1alpha2\nkind: SeldonDeployment\nmetadata:\n name: sklearn\n namespace: seldon-deploy\nspec:\n name: iris\n predictors:\n - graph:\n children: []\n implementation: SKLEARN_SERVER\n modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris\n name: classifier\n name: default\n replicas: 1\nEOF\n")),(0,l.kt)("p",null,"Deploy yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f iris-sdep.yaml\n")),(0,l.kt)("p",null,"Check if the deployment was successful through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy\n")),(0,l.kt)("p",null,"If everyone runs, similar results will be printed."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NAME READY STATUS RESTARTS AGE\nsklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m\n")),(0,l.kt)("h2",{id:"ingress-url"},"Ingress URL"),(0,l.kt)("p",null,"Now, send a inference request to the deployed model to get the inference result. 
The API created by the SeldonDeployment follows the following rule:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/")),(0,l.kt)("h3",{id:"node_ip--node_port"},"NODE_IP / NODE_PORT"),(0,l.kt)("p",null,(0,l.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-seldon"},"Since Seldon Core was installed with Ambassador as the Ingress Controller"),", all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway."),(0,l.kt)("p",null,"Therefore, first set the url of the Ambassador Ingress Gateway as an environment variable."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'export NODE_IP=$(kubectl get nodes -o jsonpath=\'{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }\')\nexport NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")\n')),(0,l.kt)("p",null,"Check the set url."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'echo "NODE_IP"=$NODE_IP\necho "NODE_PORT"=$NODE_PORT\n')),(0,l.kt)("p",null,"It should be outputted similarly as follows, and if set through the cloud, you can check that internal IP address is set."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"NODE_IP=192.168.0.19\nNODE_PORT=30486\n")),(0,l.kt)("h3",{id:"namespace--seldon-deployment-name"},"namespace / seldon-deployment-name"),(0,l.kt)("p",null,"This refers to the ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," and ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," where the SeldonDeployment is deployed and used to define the values defined in the metadata when defining the spec."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"metadata:\n name: sklearn\n namespace: seldon-deploy\n")),(0,l.kt)("p",null,"In the example above, ",(0,l.kt)("inlineCode",{parentName:"p"},"namespace")," is seldon-deploy, ",(0,l.kt)("inlineCode",{parentName:"p"},"seldon-deployment-name")," is sklearn."),(0,l.kt)("h3",{id:"method-name"},"method-name"),(0,l.kt)("p",null,"In SeldonDeployment, the commonly used ",(0,l.kt)("inlineCode",{parentName:"p"},"method-name")," has two options:"),(0,l.kt)("ol",null,(0,l.kt)("li",{parentName:"ol"},"doc"),(0,l.kt)("li",{parentName:"ol"},"predictions")),(0,l.kt)("p",null,"The detailed usage of each method is explained below."),(0,l.kt)("h2",{id:"using-swagger"},"Using Swagger"),(0,l.kt)("p",null,"First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon."),(0,l.kt)("h3",{id:"1-accessing-swagger"},"1. Accessing Swagger"),(0,l.kt)("p",null,"According to the provided ingress URL rules, you can access the Swagger documentation using the following URL:\n",(0,l.kt)("inlineCode",{parentName:"p"},"http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger1.png",src:t(2909).Z,width:"3068",height:"1650"})),(0,l.kt)("h3",{id:"2-selecting-swagger-predictions"},"2. Selecting Swagger Predictions"),(0,l.kt)("p",null,"In the Swagger UI, select the ",(0,l.kt)("inlineCode",{parentName:"p"},"/seldon/seldon-deploy/sklearn/api/v1.0/predictions")," endpoint."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger2.png",src:t(1087).Z,width:"3068",height:"1652"})),(0,l.kt)("h3",{id:"3-choosing-try-it-out"},"3. 
Choosing ",(0,l.kt)("em",{parentName:"h3"},"Try it out")),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger3.png",src:t(5197).Z,width:"3069",height:"1653"})),(0,l.kt)("h3",{id:"4-inputting-data-in-the-request-body"},"4. Inputting data in the Request body"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger4.png",src:t(6669).Z,width:"3072",height:"1652"})),(0,l.kt)("p",null,"Enter the following data into the Request body."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "ndarray":[[1.0, 2.0, 5.0, 6.0]]\n }\n}\n')),(0,l.kt)("h3",{id:"5-check-the-inference-results"},"5. Check the inference results"),(0,l.kt)("p",null,"You can click the ",(0,l.kt)("inlineCode",{parentName:"p"},"Execute")," button to obtain the inference result."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"iris-swagger5.png",src:t(8503).Z,width:"3583",height:"1969"})),(0,l.kt)("p",null,"If everything is executed successfully, you will obtain the following inference result."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "data": {\n "names": [\n "t:0",\n "t:1",\n "t:2"\n ],\n "ndarray": [\n [\n 9.912315378486697e-7,\n 0.0007015931307746079,\n 0.9992974156376876\n ]\n ]\n },\n "meta": {\n "requestPath": {\n "classifier": "seldonio/sklearnserver:1.11.2"\n }\n }\n}\n')),(0,l.kt)("h2",{id:"using-cli"},"Using CLI"),(0,l.kt)("p",null,"Also, you can use http client CLI tools such as curl to make API requests.\nFor example, requesting ",(0,l.kt)("inlineCode",{parentName:"p"},"/predictions")," as follows"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H 'Content-Type: application/json' \\\n-d '{ \"data\": { \"ndarray\": [[1,2,3,4]] } }'\n")),(0,l.kt)("p",null,"You can confirm that the following response is outputted normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}\n')))}u.isMDXComponent=!0},2909:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger1-1d3574d988c85be7534f518f1e5fe097.png"},1087:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger2-ff43013f3e20de5f305d2215a599aa88.png"},5197:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger3-af84538f8d07efd95a2e820e32be2670.png"},6669:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger4-8ba33dee625455b3de8326a6677ac6ca.png"},8503:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/iris-swagger5-53bd997e4f2e7f1904edebd974c6e128.png"}}]); \ No newline at end of file diff --git a/en/assets/js/c0f17dd2.234971fa.js b/en/assets/js/c0f17dd2.22ea06ec.js similarity index 98% rename from en/assets/js/c0f17dd2.234971fa.js rename to en/assets/js/c0f17dd2.22ea06ec.js index 396ae48a..1a8307ae 100644 --- a/en/assets/js/c0f17dd2.234971fa.js +++ b/en/assets/js/c0f17dd2.22ea06ec.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2011],{3905:(e,t,o)=>{o.d(t,{Zo:()=>c,kt:()=>h});var n=o(7294);function r(e,t,o){return t in e?Object.defineProperty(e,t,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[t]=o,e}function a(e,t){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return 
Object.getOwnPropertyDescriptor(e,t).enumerable}))),o.push.apply(o,n)}return o}function s(e){for(var t=1;t=0||(r[o]=e[o]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(r[o]=e[o])}return r}var l=n.createContext({}),u=function(e){var t=n.useContext(l),o=t;return e&&(o="function"==typeof e?e(t):s(s({},t),e)),o},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var o=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=u(o),m=r,h=d["".concat(l,".").concat(m)]||d[m]||p[m]||a;return o?n.createElement(h,s(s({ref:t},c),{},{components:o})):n.createElement(h,s({ref:t},c))}));function h(e,t){var o=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=o.length,s=new Array(a);s[0]=m;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var u=2;u{o.r(t),o.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var n=o(7462),r=(o(7294),o(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/volumes",id:"version-1.0/kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments"}},l={},u=[{value:"Volumes",id:"volumes",level:2},{value:"Creating a Volume",id:"creating-a-volume",level:2}],c={toc:u},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"volumes"},"Volumes"),(0,r.kt)("p",null,"Next, let's click on the Volumes tab in the left of the Central Dashboard."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:o(7173).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"You will see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"volumes",src:o(2983).Z,width:"1386",height:"382"})),(0,r.kt)("p",null,"Volumes tab provides the functionality to manage the Persistent Volume Claims (PVC) belonging to the current user's namespace in Kubernetes' Volume (Volume)."),(0,r.kt)("p",null,"By looking at the screenshot, you can see the information of the Volume created on the ",(0,r.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," page. 
It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation."),(0,r.kt)("p",null,"In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace."),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"creating-a-volume"},"Creating a Volume"),(0,r.kt)("p",null,"By clicking the ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," button at the top right, you can see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-volume",src:o(6876).Z,width:"1192",height:"934"})),(0,r.kt)("p",null,"You can create a volume by specifying its name, size, storage class, and access mode."),(0,r.kt)("p",null,"When you specify the desired resource specs to create a volume, its Status will be shown as Pending on this page. When you hover over the Status icon, you will see a message that this ",(0,r.kt)("em",{parentName:"p"},"(This volume will be bound when its first consumer is created.)"),(0,r.kt)("br",{parentName:"p"}),"\n","This is according to the volume creation policy of the ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass")," used in the lab, which is local-path. ",(0,r.kt)("strong",{parentName:"p"},"This is not a problem situation."),(0,r.kt)("br",{parentName:"p"}),"\n","When the Status is shown as Pending on this page, you can still specify the name of the volume in the notebook server or pod that you want to use the volume and the volume creation will be triggered at that time."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating-volume",src:o(2996).Z,width:"1572",height:"450"})))}p.isMDXComponent=!0},2996:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},7173:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},6876:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},2983:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2011],{3905:(e,t,o)=>{o.d(t,{Zo:()=>c,kt:()=>h});var n=o(7294);function r(e,t,o){return t in e?Object.defineProperty(e,t,{value:o,enumerable:!0,configurable:!0,writable:!0}):e[t]=o,e}function a(e,t){var o=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),o.push.apply(o,n)}return o}function s(e){for(var t=1;t=0||(r[o]=e[o]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,o)&&(r[o]=e[o])}return r}var l=n.createContext({}),u=function(e){var t=n.useContext(l),o=t;return e&&(o="function"==typeof e?e(t):s(s({},t),e)),o},c=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var o=e.components,r=e.mdxType,a=e.originalType,l=e.parentName,c=i(e,["components","mdxType","originalType","parentName"]),d=u(o),m=r,h=d["".concat(l,".").concat(m)]||d[m]||p[m]||a;return o?n.createElement(h,s(s({ref:t},c),{},{components:o})):n.createElement(h,s({ref:t},c))}));function h(e,t){var 
o=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=o.length,s=new Array(a);s[0]=m;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[d]="string"==typeof e?e:r,s[1]=i;for(var u=2;u{o.r(t),o.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var n=o(7462),r=(o(7294),o(3905));const a={title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/volumes",id:"version-1.0/kubeflow-dashboard-guide/volumes",title:"4. Volumes",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/volumes.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/volumes",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/volumes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/volumes.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Volumes",description:"",sidebar_position:4,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Tensorboards",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/tensorboards"},next:{title:"5. Experiments(AutoML)",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/experiments"}},l={},u=[{value:"Volumes",id:"volumes",level:2},{value:"Creating a Volume",id:"creating-a-volume",level:2}],c={toc:u},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,n.Z)({},c,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"volumes"},"Volumes"),(0,r.kt)("p",null,"Next, let's click on the Volumes tab in the left of the Central Dashboard."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"left-tabs",src:o(7173).Z,width:"3940",height:"1278"})),(0,r.kt)("p",null,"You will see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"volumes",src:o(2983).Z,width:"1386",height:"382"})),(0,r.kt)("p",null,"Volumes tab provides the functionality to manage the Persistent Volume Claims (PVC) belonging to the current user's namespace in Kubernetes' Volume (Volume)."),(0,r.kt)("p",null,"By looking at the screenshot, you can see the information of the Volume created on the ",(0,r.kt)("a",{parentName:"p",href:"../kubeflow-dashboard-guide/notebooks"},"1. Notebooks")," page. It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation."),(0,r.kt)("p",null,"In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace."),(0,r.kt)("hr",null),(0,r.kt)("h2",{id:"creating-a-volume"},"Creating a Volume"),(0,r.kt)("p",null,"By clicking the ",(0,r.kt)("inlineCode",{parentName:"p"},"+ NEW VOLUME")," button at the top right, you can see the following screen."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"new-volume",src:o(6876).Z,width:"1192",height:"934"})),(0,r.kt)("p",null,"You can create a volume by specifying its name, size, storage class, and access mode."),(0,r.kt)("p",null,"When you specify the desired resource specs to create a volume, its Status will be shown as Pending on this page. 
When you hover over the Status icon, you will see a message that this ",(0,r.kt)("em",{parentName:"p"},"(This volume will be bound when its first consumer is created.)"),(0,r.kt)("br",{parentName:"p"}),"\n","This is according to the volume creation policy of the ",(0,r.kt)("a",{parentName:"p",href:"https://kubernetes.io/ko/docs/concepts/storage/storage-classes/"},"StorageClass")," used in the lab, which is local-path. ",(0,r.kt)("strong",{parentName:"p"},"This is not a problem situation."),(0,r.kt)("br",{parentName:"p"}),"\n","When the Status is shown as Pending on this page, you can still specify the name of the volume in the notebook server or pod that you want to use the volume and the volume creation will be triggered at that time."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"creating-volume",src:o(2996).Z,width:"1572",height:"450"})))}p.isMDXComponent=!0},2996:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/creating-volume-38085f1d8dcc5f1a0f2df336a6ad99e7.png"},7173:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},6876:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/new-volume-b14c633d4f22b7948f111122da491ccd.png"},2983:(e,t,o)=>{o.d(t,{Z:()=>n});const n=o.p+"assets/images/volumes-8a47fc94771470514efa705ec8b6d0fe.png"}}]); \ No newline at end of file diff --git a/en/assets/js/c83b8faa.9ac2feda.js b/en/assets/js/c83b8faa.5270a3d5.js similarity index 99% rename from en/assets/js/c83b8faa.9ac2feda.js rename to en/assets/js/c83b8faa.5270a3d5.js index d0098429..1167bb3f 100644 --- a/en/assets/js/c83b8faa.9ac2feda.js +++ b/en/assets/js/c83b8faa.5270a3d5.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2052],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function s(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var l=a.createContext({}),u=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},p=function(e){var t=u(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,l=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,f=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(f,s(s({ref:t},p),{},{components:n})):a.createElement(f,s({ref:t},p))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,s=new Array(r);s[0]=m;var o={};for(var l in t)hasOwnProperty.call(t,l)&&(o[l]=t[l]);o.originalType=e,o[d]="string"==typeof e?e:i,s[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>u});var a=n(7462),i=(n(7294),n(3905));const r={title:"6. 
(Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,o={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"version-1.0/setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/en/docs/1.0/setup-components/install-components-kf"}},l={},u=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. Install NVIDIA-Docker.",id:"2-install-nvidia-docker",level:2},{value:"3. Setting NVIDIA-Docker as the Default Container Runtime",id:"3-setting-nvidia-docker-as-the-default-container-runtime",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],p={toc:u},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"For using GPU in Kubernetes and Kubeflow, the following tasks are required."),(0,i.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,i.kt)("p",null,"If the following screen is output when executing ",(0,i.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),", please omit this step."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("p",null,"If the output of nvidia-smi is not as above, please install the nvidia driver that fits your installed GPU."),(0,i.kt)("p",null,"If you are not familiar with the installation of nvidia drivers, please install it through the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,i.kt)("h2",{id:"2-install-nvidia-docker"},"2. Install NVIDIA-Docker."),(0,i.kt)("p",null,"Let's install NVIDIA-Docker."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,i.kt)("p",null,"To check if it is installed correctly, we will run the docker container using the GPU."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,i.kt)("p",null,"If the following message appears, it means that the installation was successful: "),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("h2",{id:"3-setting-nvidia-docker-as-the-default-container-runtime"},"3. Setting NVIDIA-Docker as the Default Container Runtime"),(0,i.kt)("p",null,"By default, Kubernetes uses Docker-CE as the default container runtime. To use NVIDIA GPU within Docker containers, you need to configure NVIDIA-Docker as the container runtime and modify the default runtime for creating pods."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Open the ",(0,i.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," file and make the following modifications:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"After confirming the file changes, restart Docker."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the changes have been applied."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the installation was successful."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,i.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Create the nvidia-device-plugin daemonset."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nvidia-device-plugin pod is in the RUNNING state."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")))),(0,i.kt)("p",null,"You should see the following output:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n")),(0,i.kt)("ol",{start:3},(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nodes have been configured to have GPUs available."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the configuration was successful.",(0,i.kt)("br",{parentName:"p"}),"\n","(",(0,i.kt)("em",{parentName:"p"},"In the "),"MLOps for ALL* tutorial cluster, there are two GPUs, so the output is 2.\nIf the output shows the correct number of GPUs for your cluster, it is fine.)"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")),(0,i.kt)("p",{parentName:"li"},"If it is not configured, the GPU value will be displayed as ",(0,i.kt)("inlineCode",{parentName:"p"},""),"."))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2052],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function s(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var l=a.createContext({}),u=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):s(s({},t),e)),n},p=function(e){var t=u(e.components);return a.createElement(l.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,l=e.parentName,p=o(e,["components","mdxType","originalType","parentName"]),d=u(n),m=i,f=d["".concat(l,".").concat(m)]||d[m]||c[m]||r;return n?a.createElement(f,s(s({ref:t},p),{},{components:n})):a.createElement(f,s({ref:t},p))}));function f(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,s=new Array(r);s[0]=m;var o={};for(var l in t)hasOwnProperty.call(t,l)&&(o[l]=t[l]);o.originalType=e,o[d]="string"==typeof e?e:i,s[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>c,frontMatter:()=>r,metadata:()=>o,toc:()=>u});var 
a=n(7462),i=(n(7294),n(3905));const r={title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},s=void 0,o={unversionedId:"setup-kubernetes/setup-nvidia-gpu",id:"version-1.0/setup-kubernetes/setup-nvidia-gpu",title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/setup-nvidia-gpu",permalink:"/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/setup-nvidia-gpu.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. (Optional) Setup GPU",description:"Install nvidia docker, nvidia device plugin",sidebar_position:6,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"5. Install Kubernetes Modules",permalink:"/en/docs/1.0/setup-kubernetes/install-kubernetes-module"},next:{title:"1. Kubeflow",permalink:"/en/docs/1.0/setup-components/install-components-kf"}},l={},u=[{value:"1. Install NVIDIA Driver",id:"1-install-nvidia-driver",level:2},{value:"2. Install NVIDIA-Docker.",id:"2-install-nvidia-docker",level:2},{value:"3. Setting NVIDIA-Docker as the Default Container Runtime",id:"3-setting-nvidia-docker-as-the-default-container-runtime",level:2},{value:"4. Nvidia-Device-Plugin",id:"4-nvidia-device-plugin",level:2}],p={toc:u},d="wrapper";function c(e){let{components:t,...n}=e;return(0,i.kt)(d,(0,a.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"For using GPU in Kubernetes and Kubeflow, the following tasks are required."),(0,i.kt)("h2",{id:"1-install-nvidia-driver"},"1. Install NVIDIA Driver"),(0,i.kt)("p",null,"If the following screen is output when executing ",(0,i.kt)("inlineCode",{parentName:"p"},"nvidia-smi"),", please omit this step."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ nvidia-smi \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n| 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |\n| 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |\n| 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("p",null,"If the output of nvidia-smi is not as above, please install the nvidia driver that fits your installed GPU."),(0,i.kt)("p",null,"If you are not familiar with the installation of nvidia drivers, please install it through the following command."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo add-apt-repository ppa:graphics-drivers/ppa\nsudo apt update && sudo apt install -y ubuntu-drivers-common\nsudo ubuntu-drivers autoinstall\nsudo reboot\n")),(0,i.kt)("h2",{id:"2-install-nvidia-docker"},"2. Install NVIDIA-Docker."),(0,i.kt)("p",null,"Let's install NVIDIA-Docker."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \\\n sudo apt-key add -\ndistribution=$(. /etc/os-release;echo $ID$VERSION_ID)\ncurl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list\nsudo apt-get update\nsudo apt-get install -y nvidia-docker2 &&\nsudo systemctl restart docker\n")),(0,i.kt)("p",null,"To check if it is installed correctly, we will run the docker container using the GPU."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n")),(0,i.kt)("p",null,"If the following message appears, it means that the installation was successful: "),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi\n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |\n|-------------------------------+----------------------+----------------------+\n| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n| | | MIG M. |\n|===============================+======================+======================|\n| 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |\n| 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n| 1 NVIDIA GeForce ... 
Off | 00000000:02:00.0 Off | N/A |\n| 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |\n| | | N/A |\n+-------------------------------+----------------------+----------------------+\n \n+-----------------------------------------------------------------------------+\n| Processes: |\n| GPU GI CI PID Type Process name GPU Memory |\n| ID ID Usage |\n|=============================================================================|\n+-----------------------------------------------------------------------------+\n")),(0,i.kt)("h2",{id:"3-setting-nvidia-docker-as-the-default-container-runtime"},"3. Setting NVIDIA-Docker as the Default Container Runtime"),(0,i.kt)("p",null,"By default, Kubernetes uses Docker-CE as the default container runtime. To use NVIDIA GPU within Docker containers, you need to configure NVIDIA-Docker as the container runtime and modify the default runtime for creating pods."),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Open the ",(0,i.kt)("inlineCode",{parentName:"p"},"/etc/docker/daemon.json")," file and make the following modifications:"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'sudo vi /etc/docker/daemon.json\n\n{\n "default-runtime": "nvidia",\n "runtimes": {\n "nvidia": {\n "path": "nvidia-container-runtime",\n "runtimeArgs": []\n }\n }\n}\n'))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"After confirming the file changes, restart Docker."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo systemctl daemon-reload\nsudo service docker restart\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the changes have been applied."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"sudo docker info | grep nvidia\n")),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the installation was successful."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"mlops@ubuntu:~$ docker info | grep nvidia\nRuntimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc\nDefault Runtime: nvidia\n")))),(0,i.kt)("h2",{id:"4-nvidia-device-plugin"},"4. 
Nvidia-Device-Plugin"),(0,i.kt)("ol",null,(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Create the nvidia-device-plugin daemonset."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml\n"))),(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nvidia-device-plugin pod is in the RUNNING state."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n kube-system | grep nvidia\n")))),(0,i.kt)("p",null,"You should see the following output:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"kube-system nvidia-device-plugin-daemonset-nlqh2 1/1 Running 0 1h\n")),(0,i.kt)("ol",{start:3},(0,i.kt)("li",{parentName:"ol"},(0,i.kt)("p",{parentName:"li"},"Verify that the nodes have been configured to have GPUs available."),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},'kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\\.com/gpu"\n')),(0,i.kt)("p",{parentName:"li"},"If you see the following message, it means that the configuration was successful.",(0,i.kt)("br",{parentName:"p"}),"\n","(",(0,i.kt)("em",{parentName:"p"},"In the "),"MLOps for ALL* tutorial cluster, there are two GPUs, so the output is 2.\nIf the output shows the correct number of GPUs for your cluster, it is fine.)"),(0,i.kt)("pre",{parentName:"li"},(0,i.kt)("code",{parentName:"pre",className:"language-bash"},"NAME GPU\nubuntu 2\n")),(0,i.kt)("p",{parentName:"li"},"If it is not configured, the GPU value will be displayed as ",(0,i.kt)("inlineCode",{parentName:"p"},""),"."))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/cd8cc8f6.f9f78fcd.js b/en/assets/js/cd8cc8f6.abb29663.js similarity index 98% rename from en/assets/js/cd8cc8f6.f9f78fcd.js rename to en/assets/js/cd8cc8f6.abb29663.js index 5494b498..caea0938 100644 --- a/en/assets/js/cd8cc8f6.f9f78fcd.js +++ b/en/assets/js/cd8cc8f6.abb29663.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1973],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var u=r.createContext({}),c=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=c(e.components);return r.createElement(u.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,u=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),p=c(n),d=o,b=p["".concat(u,".").concat(d)]||p[d]||f[d]||i;return n?r.createElement(b,a(a({ref:t},s),{},{components:n})):r.createElement(b,a({ref:t},s))}));function b(e,t){var 
n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=d;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>a,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},a=void 0,l={unversionedId:"kubeflow/kubeflow-intro",id:"kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/en/docs/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. Kubeflow Concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts"}},u={},c=[],s={toc:c},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"To use Kubeflow, you need to write components and pipelines."),(0,o.kt)("p",null,"The approach described in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL")," differs slightly from the method described on the ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline official website"),". 
Here, Kubeflow Pipeline is used as one of the components in the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"elements that make up MLOps")," rather than a standalone workflow."),(0,o.kt)("p",null,"Now, let's understand what components and pipelines are and how to write them."))}f.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1973],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>b});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var u=r.createContext({}),c=function(e){var t=r.useContext(u),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},s=function(e){var t=c(e.components);return r.createElement(u.Provider,{value:t},e.children)},p="mdxType",f={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},d=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,u=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),p=c(n),d=o,b=p["".concat(u,".").concat(d)]||p[d]||f[d]||i;return n?r.createElement(b,a(a({ref:t},s),{},{components:n})):r.createElement(b,a({ref:t},s))}));function b(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=d;var l={};for(var u in t)hasOwnProperty.call(t,u)&&(l[u]=t[u]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>u,contentTitle:()=>a,default:()=>f,frontMatter:()=>i,metadata:()=>l,toc:()=>c});var r=n(7462),o=(n(7294),n(3905));const i={title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},a=void 0,l={unversionedId:"kubeflow/kubeflow-intro",id:"kubeflow/kubeflow-intro",title:"1. Kubeflow Introduction",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/kubeflow-intro.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-intro",permalink:"/en/docs/kubeflow/kubeflow-intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Kubeflow Introduction",description:"",sidebar_position:1,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others"},next:{title:"2. 
Kubeflow Concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts"}},u={},c=[],s={toc:c},p="wrapper";function f(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},s,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"To use Kubeflow, you need to write components and pipelines."),(0,o.kt)("p",null,"The approach described in ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL")," differs slightly from the method described on the ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/overview/quickstart/"},"Kubeflow Pipeline official website"),". Here, Kubeflow Pipeline is used as one of the components in the ",(0,o.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/kubeflow-concepts#component-contents"},"elements that make up MLOps")," rather than a standalone workflow."),(0,o.kt)("p",null,"Now, let's understand what components and pipelines are and how to write them."))}f.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/cf42168c.a95d6bd9.js b/en/assets/js/cf42168c.90a034eb.js similarity index 99% rename from en/assets/js/cf42168c.a95d6bd9.js rename to en/assets/js/cf42168c.90a034eb.js index db19bb93..7da9e466 100644 --- a/en/assets/js/cf42168c.a95d6bd9.js +++ b/en/assets/js/cf42168c.90a034eb.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8376],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>h});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function a(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):a(a({},n),e)),t},d=function(e){var n=p(e.components);return r.createElement(s.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),m=o,h=c["".concat(s,".").concat(m)]||c[m]||u[m]||i;return t?r.createElement(h,a(a({ref:n},d),{},{components:t})):r.createElement(h,a({ref:n},d))}));function h(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var i=t.length,a=new Array(i);a[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>a,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=t(7462),o=(t(7294),t(3905));const i={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,l={unversionedId:"api-deployment/what-is-api-deployment",id:"api-deployment/what-is-api-deployment",title:"1. 
What is API Deployment?",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/en/docs/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/what-is-api-deployment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/en/docs/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/api-deployment/seldon-iris"}},s={},p=[{value:"What is API Deployment?",id:"what-is-api-deployment",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],d={toc:p},c="wrapper";function u(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,r.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"what-is-api-deployment"},"What is API Deployment?"),(0,o.kt)("p",null,"After training a machine learning model, how should it be used? When training a machine learning model, you expect a model with higher performance to come out, but when you infer with the trained model, you want to get the inference results quickly and easily."),(0,o.kt)("p",null,"When you want to check the inference results of the model, you can load the trained model and infer through a Jupyter notebook or a Python script. However, this method becomes inefficient as the model gets bigger, and you can only use the model in the environment where the trained model exists and cannot be used by many people."),(0,o.kt)("p",null,"Therefore, when machine learning is used in actual services, it uses an API to use the trained model. The model is loaded only once in the environment where the API server is running, and you can easily get the inference results using DNS, and you can also link it with other services."),(0,o.kt)("p",null,"However, there is a lot of ancillary work necessary to make the model into an API. In order to make it easier to make an API, machine learning frameworks such as Tensorflow have developed inference engines."),(0,o.kt)("p",null,"Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. 
When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response."),(0,o.kt)("p",null,"Some well-known open-source inference engines include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow: Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch: Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"ONNX: ONNX Runtime"))),(0,o.kt)("p",null,"While not officially supported in open-source, there are also inference engines developed for popular frameworks like sklearn and XGBoost."),(0,o.kt)("p",null,"Deploying and serving the model's inference results through an API is called ",(0,o.kt)("strong",{parentName:"p"},"API deployment"),"."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"I introduced the fact that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. Additionally, we may need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and updating the version when an improved model is available. There are many considerations when operating an inference engine, and it goes beyond just a few tasks."),(0,o.kt)("p",null,"To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment."),(0,o.kt)("p",null,"Some popular serving frameworks include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,"In ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", we use Seldon Core to demonstrate the process of API deployment."))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8376],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>h});var r=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function i(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function a(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var s=r.createContext({}),p=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):a(a({},n),e)),t},d=function(e){var n=p(e.components);return 
r.createElement(s.Provider,{value:n},e.children)},c="mdxType",u={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),c=p(t),m=o,h=c["".concat(s,".").concat(m)]||c[m]||u[m]||i;return t?r.createElement(h,a(a({ref:n},d),{},{components:t})):r.createElement(h,a({ref:n},d))}));function h(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var i=t.length,a=new Array(i);a[0]=m;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l[c]="string"==typeof e?e:o,a[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>a,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var r=t(7462),o=(t(7294),t(3905));const i={title:"1. What is API Deployment?",description:"",sidebar_position:1,date:new Date("2021-12-22T00:00:00.000Z"),lastmod:new Date("2021-12-22T00:00:00.000Z"),contributors:["Youngcheol Jang"]},a=void 0,l={unversionedId:"api-deployment/what-is-api-deployment",id:"api-deployment/what-is-api-deployment",title:"1. What is API Deployment?",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/what-is-api-deployment.md",sourceDirName:"api-deployment",slug:"/api-deployment/what-is-api-deployment",permalink:"/en/docs/api-deployment/what-is-api-deployment",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/what-is-api-deployment.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. What is API Deployment?",description:"",sidebar_position:1,date:"2021-12-22T00:00:00.000Z",lastmod:"2021-12-22T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"13. Component - Debugging",permalink:"/en/docs/kubeflow/how-to-debug"},next:{title:"2. Deploy SeldonDeployment",permalink:"/en/docs/api-deployment/seldon-iris"}},s={},p=[{value:"What is API Deployment?",id:"what-is-api-deployment",level:2},{value:"Serving Framework",id:"serving-framework",level:2}],d={toc:p},c="wrapper";function u(e){let{components:n,...t}=e;return(0,o.kt)(c,(0,r.Z)({},d,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"what-is-api-deployment"},"What is API Deployment?"),(0,o.kt)("p",null,"After training a machine learning model, how should it be used? When training a machine learning model, you expect a model with higher performance to come out, but when you infer with the trained model, you want to get the inference results quickly and easily."),(0,o.kt)("p",null,"When you want to check the inference results of the model, you can load the trained model and infer through a Jupyter notebook or a Python script. However, this method becomes inefficient as the model gets bigger, and you can only use the model in the environment where the trained model exists and cannot be used by many people."),(0,o.kt)("p",null,"Therefore, when machine learning is used in actual services, it uses an API to use the trained model. The model is loaded only once in the environment where the API server is running, and you can easily get the inference results using DNS, and you can also link it with other services."),(0,o.kt)("p",null,"However, there is a lot of ancillary work necessary to make the model into an API. 
In order to make it easier to make an API, machine learning frameworks such as Tensorflow have developed inference engines."),(0,o.kt)("p",null,"Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response."),(0,o.kt)("p",null,"Some well-known open-source inference engines include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/tensorflow/serving"},"Tensorflow: Tensorflow Serving")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/pytorch/serve"},"PyTorch: Torchserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/microsoft/onnxruntime"},"ONNX: ONNX Runtime"))),(0,o.kt)("p",null,"While not officially supported in open-source, there are also inference engines developed for popular frameworks like sklearn and XGBoost."),(0,o.kt)("p",null,"Deploying and serving the model's inference results through an API is called ",(0,o.kt)("strong",{parentName:"p"},"API deployment"),"."),(0,o.kt)("h2",{id:"serving-framework"},"Serving Framework"),(0,o.kt)("p",null,"I introduced the fact that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. Additionally, we may need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and updating the version when an improved model is available. 
There are many considerations when operating an inference engine, and it goes beyond just a few tasks."),(0,o.kt)("p",null,"To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment."),(0,o.kt)("p",null,"Some popular serving frameworks include:"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/SeldonIO/seldon-core"},"Seldon Core")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/kserve"},"Kserve")),(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://github.com/bentoml/BentoML"},"BentoML"))),(0,o.kt)("p",null,"In ",(0,o.kt)("em",{parentName:"p"},"MLOps for ALL"),", we use Seldon Core to demonstrate the process of API deployment."))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/d0c9c887.d875b8ef.js b/en/assets/js/d0c9c887.f72175e6.js similarity index 99% rename from en/assets/js/d0c9c887.d875b8ef.js rename to en/assets/js/d0c9c887.f72175e6.js index 69b6be76..110e0aa7 100644 --- a/en/assets/js/d0c9c887.d875b8ef.js +++ b/en/assets/js/d0c9c887.f72175e6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2571],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>_});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),m=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=m(e.components);return a.createElement(s.Provider,{value:n},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),p=m(t),u=l,_=p["".concat(s,".").concat(u)]||p[u]||c[u]||r;return t?a.createElement(_,o(o({ref:n},d),{},{components:t})):a.createElement(_,o({ref:n},d))}));function _(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:l,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-children",id:"version-1.0/api-deployment/seldon-children",title:"6. 
Multi Models",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/en/docs/1.0/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-children.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow"},next:{title:"1. Install Python virtual environment",permalink:"/en/docs/1.0/appendix/pyenv"}},s={},m=[],d={toc:m},p="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(p,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models. "),(0,l.kt)("p",null,"First, we will create a pipeline that creates two models. We will add a StandardScaler to the SVC model we used before and store it."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: 
InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"If you upload the pipeline, it will look like 
this.\n",(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(5934).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"When you check the MLflow dashboard, two models will be generated, as shown below. "),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(1892).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"After checking the run_id of each one, define the SeldonDeployment spec as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"Two models have been created so each model's initContainer and container must be defined. This field takes input as an array and the order does not matter. The order in which the models are executed is defined in the graph."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"The operation of the graph is to convert the initial value received into a predefined predict_method and then pass it to the model defined as children. 
In this case, the data is passed from scaler -> svc."),(0,l.kt)("p",null,"Now let's create the above specifications in a yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"Create an API through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"If properly performed, it will be outputted as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"Check to see if it has been generated normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"If it is created normally, a similar pod will be created."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},5934:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},1892:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2571],{3905:(e,n,t)=>{t.d(n,{Zo:()=>d,kt:()=>_});var a=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function r(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return 
Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function o(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=a.createContext({}),m=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):o(o({},n),e)),t},d=function(e){var n=m(e.components);return a.createElement(s.Provider,{value:n},e.children)},p="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,l=e.mdxType,r=e.originalType,s=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),p=m(t),u=l,_=p["".concat(s,".").concat(u)]||p[u]||c[u]||r;return t?a.createElement(_,o(o({ref:n},d),{},{components:t})):a.createElement(_,o({ref:n},d))}));function _(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var r=t.length,o=new Array(r);o[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[p]="string"==typeof e?e:l,o[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>o,default:()=>c,frontMatter:()=>r,metadata:()=>i,toc:()=>m});var a=t(7462),l=(t(7294),t(3905));const r={title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},o=void 0,i={unversionedId:"api-deployment/seldon-children",id:"version-1.0/api-deployment/seldon-children",title:"6. Multi Models",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/api-deployment/seldon-children.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-children",permalink:"/en/docs/1.0/api-deployment/seldon-children",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/api-deployment/seldon-children.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Multi Models",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Model from MLflow",permalink:"/en/docs/1.0/api-deployment/seldon-mlflow"},next:{title:"1. Install Python virtual environment",permalink:"/en/docs/1.0/appendix/pyenv"}},s={},m=[],d={toc:m},p="wrapper";function c(e){let{components:n,...r}=e;return(0,l.kt)(p,(0,a.Z)({},d,r,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models. "),(0,l.kt)("p",null,"First, we will create a pipeline that creates two models. 
We will add a StandardScaler to the SVC model we used before and store it."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_scaler_from_csv(\n data_path: InputPath("csv"),\n scaled_data_path: OutputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n):\n import dill\n import pandas as pd\n from sklearn.preprocessing import StandardScaler\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n data = pd.read_csv(data_path)\n\n scaler = StandardScaler()\n scaled_data = scaler.fit_transform(data)\n scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)\n\n scaled_data.to_csv(scaled_data_path, index=False)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(scaler, file_writer)\n\n input_example = data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(data, scaler.transform(data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_svc_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["scikit-learn"],\n install_mlflow=False\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef 
upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\nfrom kfp.dsl import pipeline\n\n\n@pipeline(name="multi_model_pipeline")\ndef multi_model_pipeline(kernel: str = "rbf"):\n iris_data = load_iris_data()\n scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])\n _ = upload_sklearn_model_to_mlflow(\n model_name="scaler",\n model=scaled_data.outputs["model"],\n input_example=scaled_data.outputs["input_example"],\n signature=scaled_data.outputs["signature"],\n conda_env=scaled_data.outputs["conda_env"],\n )\n model = train_svc_from_csv(\n train_data=scaled_data.outputs["scaled_data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name="svc",\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")\n\n')),(0,l.kt)("p",null,"If you upload the pipeline, it will look like this.\n",(0,l.kt)("img",{alt:"children-kubeflow.png",src:t(5934).Z,width:"2698",height:"1886"})),(0,l.kt)("p",null,"When you check the MLflow dashboard, two models will be generated, as shown below. 
"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"children-mlflow.png",src:t(1892).Z,width:"3006",height:"1744"})),(0,l.kt)("p",null,"After checking the run_id of each one, define the SeldonDeployment spec as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: seldonio/mlflowserver:1.8.0-dev\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"Two models have been created so each model's initContainer and container must be defined. This field takes input as an array and the order does not matter. The order in which the models are executed is defined in the graph."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n')),(0,l.kt)("p",null,"The operation of the graph is to convert the initial value received into a predefined predict_method and then pass it to the model defined as children. 
In this case, the data is passed from scaler -> svc."),(0,l.kt)("p",null,"Now let's create the above specifications in a yaml file."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},'cat < multi-model.yaml\napiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: multi-model-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: scaler-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n - name: svc-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: scaler\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n - name: svc\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: scaler\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: predict_method\n type: STRING\n value: "transform"\n children:\n - name: svc\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\nEOF\n')),(0,l.kt)("p",null,"Create an API through the following command."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f multi-model.yaml\n")),(0,l.kt)("p",null,"If properly performed, it will be outputted as follows."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/multi-model-example created\n")),(0,l.kt)("p",null,"Check to see if it has been generated normally."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep multi-model-example\n")),(0,l.kt)("p",null,"If it is created normally, a similar pod will be created."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"multi-model-example-model-0-scaler-svc-9955fb795-n9ffw 4/4 Running 0 2m30s\n")))}c.isMDXComponent=!0},5934:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-kubeflow-5100745b1be1aa100dd153b1785ad218.png"},1892:(e,n,t)=>{t.d(n,{Z:()=>a});const a=t.p+"assets/images/children-mlflow-5190d0e3f19a5772de21d1b08ece4822.png"}}]); \ No newline at end of file diff --git a/en/assets/js/d10c9a0a.8a4b91cb.js b/en/assets/js/d10c9a0a.78f9feab.js similarity index 99% rename from en/assets/js/d10c9a0a.8a4b91cb.js rename to en/assets/js/d10c9a0a.78f9feab.js index 67655ea8..658a668b 100644 --- a/en/assets/js/d10c9a0a.8a4b91cb.js +++ b/en/assets/js/d10c9a0a.78f9feab.js @@ -1 +1 @@ -"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8367],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=t.createContext({}),s=function(e){var n=t.useContext(p),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(p,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var p in n)hasOwnProperty.call(n,p)&&(o[p]=n[p]);o.originalType=e,o[m]="string"==typeof e?e:r,i[1]=o;for(var s=2;s{a.r(n),a.d(n,{assets:()=>p,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>o,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,o={unversionedId:"kubeflow/advanced-mlflow",id:"kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/en/docs/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/en/docs/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/en/docs/kubeflow/how-to-debug"}},p={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. Train model",id:"1-train-model",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. 
Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,"In this page, we will explain the process of writing a component to store the model in MLFlow so that the model trained in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component"},"Advanced Usage Component")," can be linked to API deployment."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"In order to store the model in MLFlow and use it in serving, the following items are needed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"We will look into the process of saving a model to MLFlow through Python code."),(0,r.kt)("h3",{id:"1-train-model"},"1. Train model"),(0,r.kt)("p",null,"The following steps involve training an SVC model using the iris dataset."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. 
MLFLow Infos"),(0,r.kt)("p",null,"This process creates the necessary information for MLFlow."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"Each variable's content is as follows."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. Save MLFLow Infos"),(0,r.kt)("p",null,"Next, we save the learned information and the model. 
Since the trained model uses the sklearn package, we can easily save the model using ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"If you work locally, a svc folder will be created and the following files will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"If you execute the command above, you can check the following output value."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"Each file will be as follows if checked."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"Now, let's proceed with the task of uploading the saved model to the MLflow server."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"Save and open the ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," directory generated path with ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," command to launch mlflow server and dashboard.\nAccess the mlflow dashboard, click the generated run to view it as below."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(8730).Z,width:"2782",height:"2496"}),"\n(This screen may vary 
depending on the version of mlflow.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"Now, let's write a reusable component in Kubeflow."),(0,r.kt)("p",null,"The ways of writing components that can be reused are broadly divided into three categories."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"After saving the necessary environment in the component responsible for model training, the MLflow component is only responsible for the upload."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(6694).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Pass the trained model and data to the MLflow component, which is responsible for saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(5944).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"The component responsible for model training handles both saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(5109).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"We are trying to manage the model through the first approach.\nThe reason is that we don't need to write the code to upload the MLFlow model every time like three times for each component written."),(0,r.kt)("p",null,"Reusing components is possible by the methods 1 and 2.\nHowever, in the case of 2, it is necessary to deliver the trained image and packages to the component, so ultimately additional information about the component must be delivered."),(0,r.kt)("p",null,"In order to proceed with the method 1, the learning component must also be changed.\nCode that stores the environment needed to save the model must be added."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"Write a component to upload to MLFlow.\nAt this time, configure the uploaded MLFlow endpoint to be connected to the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-mlflow"},"mlflow service")," that we 
installed.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, use the Kubernetes Service DNS Name of the Minio installed at the time of MLFlow Server installation. As this service is created in the Kubeflow namespace with the name minio-service, set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000"),".",(0,r.kt)("br",{parentName:"p"}),"\n","Similarly, for the tracking_uri address, use the Kubernetes Service DNS Name of the MLFlow server and set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"Now let's connect the components we have written and create a pipeline. 
"),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"The data we will use to train the model is sklearn's iris.\nWe will write a component to generate the data."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"The pipeline code can be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"If you organize the components and pipelines written above into a single Python file, it would look like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, 
file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import 
pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n 
template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", 
dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, 
required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n 
dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": "{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"After generating the mlflow_pipeline.yaml file after execution, upload the pipeline and execute it to check the results of the run."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(5041).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"Port-forward the mlflow service to access the MLflow UI."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"Open the web browser and connect to localhost:5000. You will then be able to see that the run has been created as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(1757).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"Click on run to verify that the trained model file is present."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(1874).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},8730:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},6694:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},5944:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},5109:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},5041:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},1757:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},1874:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8367],{3905:(e,n,a)=>{a.d(n,{Zo:()=>d,kt:()=>c});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var p=t.createContext({}),s=function(e){var n=t.useContext(p),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},d=function(e){var n=s(e.components);return t.createElement(p.Provider,{value:n},e.children)},m="mdxType",_={inlineCode:"code",wrapper:function(e){var 
n=e.children;return t.createElement(t.Fragment,{},n)}},u=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,l=e.originalType,p=e.parentName,d=o(e,["components","mdxType","originalType","parentName"]),m=s(a),u=r,c=m["".concat(p,".").concat(u)]||m[u]||_[u]||l;return a?t.createElement(c,i(i({ref:n},d),{},{components:a})):t.createElement(c,i({ref:n},d))}));function c(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var l=a.length,i=new Array(l);i[0]=u;var o={};for(var p in n)hasOwnProperty.call(n,p)&&(o[p]=n[p]);o.originalType=e,o[m]="string"==typeof e?e:r,i[1]=o;for(var s=2;s{a.r(n),a.d(n,{assets:()=>p,contentTitle:()=>i,default:()=>_,frontMatter:()=>l,metadata:()=>o,toc:()=>s});var t=a(7462),r=(a(7294),a(3905));const l={title:"12. Component - MLFlow",description:"",sidebar_position:12,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-20T00:00:00.000Z"),contributors:["Jongseob Jeon","SeungTae Kim"]},i=void 0,o={unversionedId:"kubeflow/advanced-mlflow",id:"kubeflow/advanced-mlflow",title:"12. Component - MLFlow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/advanced-mlflow.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-mlflow",permalink:"/en/docs/kubeflow/advanced-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/advanced-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:12,frontMatter:{title:"12. Component - MLFlow",description:"",sidebar_position:12,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-20T00:00:00.000Z",contributors:["Jongseob Jeon","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"11. Pipeline - Run Result",permalink:"/en/docs/kubeflow/advanced-run"},next:{title:"13. Component - Debugging",permalink:"/en/docs/kubeflow/how-to-debug"}},p={},s=[{value:"MLFlow Component",id:"mlflow-component",level:2},{value:"MLFlow in Local",id:"mlflow-in-local",level:2},{value:"1. Train model",id:"1-train-model",level:3},{value:"2. MLFLow Infos",id:"2-mlflow-infos",level:3},{value:"3. Save MLFLow Infos",id:"3-save-mlflow-infos",level:3},{value:"MLFlow on Server",id:"mlflow-on-server",level:2},{value:"MLFlow Component",id:"mlflow-component-1",level:2},{value:"MLFlow Pipeline",id:"mlflow-pipeline",level:2},{value:"Data Component",id:"data-component",level:3},{value:"Pipeline",id:"pipeline",level:3},{value:"Run",id:"run",level:3}],d={toc:s},m="wrapper";function _(e){let{components:n,...l}=e;return(0,r.kt)(m,(0,t.Z)({},d,l,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlflow-component"},"MLFlow Component"),(0,r.kt)("p",null,"In this page, we will explain the process of writing a component to store the model in MLFlow so that the model trained in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-component"},"Advanced Usage Component")," can be linked to API deployment."),(0,r.kt)("h2",{id:"mlflow-in-local"},"MLFlow in Local"),(0,r.kt)("p",null,"In order to store the model in MLFlow and use it in serving, the following items are needed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"model"),(0,r.kt)("li",{parentName:"ul"},"signature"),(0,r.kt)("li",{parentName:"ul"},"input_example"),(0,r.kt)("li",{parentName:"ul"},"conda_env")),(0,r.kt)("p",null,"We will look into the process of saving a model to MLFlow through Python code."),(0,r.kt)("h3",{id:"1-train-model"},"1. 
Train model"),(0,r.kt)("p",null,"The following steps involve training an SVC model using the iris dataset."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import pandas as pd\nfrom sklearn.datasets import load_iris\nfrom sklearn.svm import SVC\n\niris = load_iris()\n\ndata = pd.DataFrame(iris["data"], columns=iris["feature_names"])\ntarget = pd.DataFrame(iris["target"], columns=["target"])\n\nclf = SVC(kernel="rbf")\nclf.fit(data, target)\n\n')),(0,r.kt)("h3",{id:"2-mlflow-infos"},"2. MLFLow Infos"),(0,r.kt)("p",null,"This process creates the necessary information for MLFlow."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.models.signature import infer_signature\nfrom mlflow.utils.environment import _mlflow_conda_env\n\ninput_example = data.sample(1)\nsignature = infer_signature(data, clf.predict(data))\nconda_env = _mlflow_conda_env(additional_pip_deps=["dill", "pandas", "scikit-learn"])\n')),(0,r.kt)("p",null,"Each variable's content is as follows."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"input_example")),(0,r.kt)("table",{parentName:"li"},(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null},"sepal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"sepal width (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal length (cm)"),(0,r.kt)("th",{parentName:"tr",align:null},"petal width (cm)"))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},"6.5"),(0,r.kt)("td",{parentName:"tr",align:null},"6.7"),(0,r.kt)("td",{parentName:"tr",align:null},"3.1"),(0,r.kt)("td",{parentName:"tr",align:null},"4.4"))))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"signature")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"inputs:\n ['sepal length (cm)': double, 'sepal width (cm)': double, 'petal length (cm)': double, 'petal width (cm)': double]\noutputs:\n [Tensor('int64', (-1,))]\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},(0,r.kt)("inlineCode",{parentName:"p"},"conda_env")),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-python"},"{'name': 'mlflow-env',\n 'channels': ['conda-forge'],\n 'dependencies': ['python=3.8.10',\n 'pip',\n {'pip': ['mlflow', 'dill', 'pandas', 'scikit-learn']}]}\n")))),(0,r.kt)("h3",{id:"3-save-mlflow-infos"},"3. Save MLFLow Infos"),(0,r.kt)("p",null,"Next, we save the learned information and the model. 
Since the trained model uses the sklearn package, we can easily save the model using ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow.sklearn"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from mlflow.sklearn import save_model\n\nsave_model(\n sk_model=clf,\n path="svc",\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n)\n')),(0,r.kt)("p",null,"If you work locally, a svc folder will be created and the following files will be generated."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"ls svc\n")),(0,r.kt)("p",null,"If you execute the command above, you can check the following output value."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"MLmodel conda.yaml input_example.json model.pkl requirements.txt\n")),(0,r.kt)("p",null,"Each file will be as follows if checked."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"MLmodel"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'flavors:\n python_function:\n env: conda.yaml\n loader_module: mlflow.sklearn\n model_path: model.pkl\n python_version: 3.8.10\n sklearn:\n pickled_model: model.pkl\n serialization_format: cloudpickle\n sklearn_version: 1.0.1\nsaved_input_example_info:\n artifact_path: input_example.json\n pandas_orient: split\n type: dataframe\nsignature:\n inputs: \'[{"name": "sepal length (cm)", "type": "double"}, {"name": "sepal width\n (cm)", "type": "double"}, {"name": "petal length (cm)", "type": "double"}, {"name":\n "petal width (cm)", "type": "double"}]\'\n outputs: \'[{"type": "tensor", "tensor-spec": {"dtype": "int64", "shape": [-1]}}]\'\nutc_time_created: \'2021-12-06 06:52:30.612810\'\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"conda.yaml"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"channels:\n- conda-forge\ndependencies:\n- python=3.8.10\n- pip\n- pip:\n - mlflow\n - dill\n - pandas\n - scikit-learn\nname: mlflow-env\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"input_example.json"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'{\n "columns": \n [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ],\n "data": \n [\n [6.7, 3.1, 4.4, 1.4]\n ]\n}\n'))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"requirements.txt"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"mlflow\ndill\npandas\nscikit-learn\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"model.pkl"))),(0,r.kt)("h2",{id:"mlflow-on-server"},"MLFlow on Server"),(0,r.kt)("p",null,"Now, let's proceed with the task of uploading the saved model to the MLflow server."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'import mlflow\n\nwith mlflow.start_run():\n mlflow.log_artifact("svc/")\n')),(0,r.kt)("p",null,"Save and open the ",(0,r.kt)("inlineCode",{parentName:"p"},"mlruns")," directory generated path with ",(0,r.kt)("inlineCode",{parentName:"p"},"mlflow ui")," command to launch mlflow server and dashboard.\nAccess the mlflow dashboard, click the generated run to view it as below."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-0.png",src:a(8730).Z,width:"2782",height:"2496"}),"\n(This screen may vary 
depending on the version of mlflow.)"),(0,r.kt)("h2",{id:"mlflow-component-1"},"MLFlow Component"),(0,r.kt)("p",null,"Now, let's write a reusable component in Kubeflow."),(0,r.kt)("p",null,"The ways of writing components that can be reused are broadly divided into three categories."),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"After saving the necessary environment in the component responsible for model training, the MLflow component is only responsible for the upload."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-1.png",src:a(6694).Z,width:"578",height:"844"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"Pass the trained model and data to the MLflow component, which is responsible for saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-2.png",src:a(5944).Z,width:"900",height:"846"}))),(0,r.kt)("li",{parentName:"ol"},(0,r.kt)("p",{parentName:"li"},"The component responsible for model training handles both saving and uploading."),(0,r.kt)("p",{parentName:"li"},(0,r.kt)("img",{alt:"mlflow-3.png",src:a(5109).Z,width:"578",height:"406"})))),(0,r.kt)("p",null,"We are trying to manage the model through the first approach.\nThe reason is that we don't need to write the code to upload the MLFlow model every time like three times for each component written."),(0,r.kt)("p",null,"Reusing components is possible by the methods 1 and 2.\nHowever, in the case of 2, it is necessary to deliver the trained image and packages to the component, so ultimately additional information about the component must be delivered."),(0,r.kt)("p",null,"In order to proceed with the method 1, the learning component must also be changed.\nCode that stores the environment needed to save the model must be added."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n')),(0,r.kt)("p",null,"Write a component to upload to MLFlow.\nAt this time, configure the uploaded MLFlow endpoint to be connected to the ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-mlflow"},"mlflow service")," that we 
installed.",(0,r.kt)("br",{parentName:"p"}),"\n","In this case, use the Kubernetes Service DNS Name of the Minio installed at the time of MLFlow Server installation. As this service is created in the Kubeflow namespace with the name minio-service, set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://minio-service.kubeflow.svc:9000"),".",(0,r.kt)("br",{parentName:"p"}),"\n","Similarly, for the tracking_uri address, use the Kubernetes Service DNS Name of the MLFlow server and set it to ",(0,r.kt)("inlineCode",{parentName:"p"},"http://mlflow-server-service.mlflow-system.svc:5000"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\nfrom kfp.components import InputPath, create_component_from_func\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n')),(0,r.kt)("h2",{id:"mlflow-pipeline"},"MLFlow Pipeline"),(0,r.kt)("p",null,"Now let's connect the components we have written and create a pipeline. 
"),(0,r.kt)("h3",{id:"data-component"},"Data Component"),(0,r.kt)("p",null,"The data we will use to train the model is sklearn's iris.\nWe will write a component to generate the data."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n')),(0,r.kt)("h3",{id:"pipeline"},"Pipeline"),(0,r.kt)("p",null,"The pipeline code can be written as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from kfp.dsl import pipeline\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n')),(0,r.kt)("h3",{id:"run"},"Run"),(0,r.kt)("p",null,"If you organize the components and pipelines written above into a single Python file, it would look like this."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-python"},'from functools import partial\n\nimport kfp\nfrom kfp.components import InputPath, OutputPath, create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["pandas", "scikit-learn"],\n)\ndef load_iris_data(\n data_path: OutputPath("csv"),\n target_path: OutputPath("csv"),\n):\n import pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],\n)\ndef train_from_csv(\n train_data_path: InputPath("csv"),\n train_target_path: InputPath("csv"),\n model_path: OutputPath("dill"),\n input_example_path: OutputPath("dill"),\n signature_path: OutputPath("dill"),\n conda_env_path: OutputPath("dill"),\n kernel: str,\n):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, 
file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n\n@partial(\n create_component_from_func,\n packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],\n)\ndef upload_sklearn_model_to_mlflow(\n model_name: str,\n model_path: InputPath("dill"),\n input_example_path: InputPath("dill"),\n signature_path: InputPath("dill"),\n conda_env_path: InputPath("dill"),\n):\n import os\n import dill\n from mlflow.sklearn import save_model\n \n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n\n@pipeline(name="mlflow_pipeline")\ndef mlflow_pipeline(kernel: str, model_name: str):\n iris_data = load_iris_data()\n model = train_from_csv(\n train_data=iris_data.outputs["data"],\n train_target=iris_data.outputs["target"],\n kernel=kernel,\n )\n _ = upload_sklearn_model_to_mlflow(\n model_name=model_name,\n model=model.outputs["model"],\n input_example=model.outputs["input_example"],\n signature=model.outputs["signature"],\n conda_env=model.outputs["conda_env"],\n )\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")\n')),(0,r.kt)("p",null,(0,r.kt)("details",null,(0,r.kt)("summary",null,"mlflow_pipeline.yaml"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: mlflow-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: \'2022-01-19T14:14:11.999807\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "kernel", "type":\n "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}\nspec:\n entrypoint: mlflow-pipeline\n templates:\n - name: load-iris-data\n container:\n args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'pandas\' \'scikit-learn\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'pandas\' \'scikit-learn\' --user)\n && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def load_iris_data(\n data_path,\n target_path,\n ):\n import 
pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data = pd.DataFrame(iris["data"], columns=iris["feature_names"])\n target = pd.DataFrame(iris["target"], columns=["target"])\n\n data.to_csv(data_path, index=False)\n target.to_csv(target_path, index=False)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Load iris data\', description=\'\')\n _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = load_iris_data(**_parsed_args)\n image: python:3.7\n outputs:\n artifacts:\n - {name: load-iris-data-data, path: /tmp/outputs/data/data}\n - {name: load-iris-data-target, path: /tmp/outputs/target/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'pandas\'\' \'\'scikit-learn\'\' ||\n PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'\'pandas\'\' \'\'scikit-learn\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef load_iris_data(\\n data_path,\\n target_path,\\n):\\n import\n pandas as pd\\n from sklearn.datasets import load_iris\\n\\n iris = load_iris()\\n\\n data\n = pd.DataFrame(iris[\\"data\\"], columns=iris[\\"feature_names\\"])\\n target\n = pd.DataFrame(iris[\\"target\\"], columns=[\\"target\\"])\\n\\n data.to_csv(data_path,\n index=False)\\n target.to_csv(target_path, index=False)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Load iris data\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--data\\",\n dest=\\"data_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--target\\", dest=\\"target_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = load_iris_data(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":\n "data", "type": "csv"}, {"name": "target", "type": "csv"}]}\', pipelines.kubeflow.org/component_ref: \'{}\'}\n - name: mlflow-pipeline\n inputs:\n parameters:\n - {name: kernel}\n - {name: model_name}\n dag:\n tasks:\n - {name: load-iris-data, template: load-iris-data}\n - name: train-from-csv\n template: train-from-csv\n dependencies: [load-iris-data]\n arguments:\n parameters:\n - {name: kernel, value: \'{{inputs.parameters.kernel}}\'}\n artifacts:\n - {name: load-iris-data-data, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}\'}\n - {name: load-iris-data-target, from: \'{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}\'}\n - name: upload-sklearn-model-to-mlflow\n 
template: upload-sklearn-model-to-mlflow\n dependencies: [train-from-csv]\n arguments:\n parameters:\n - {name: model_name, value: \'{{inputs.parameters.model_name}}\'}\n artifacts:\n - {name: train-from-csv-conda_env, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}\'}\n - {name: train-from-csv-input_example, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}\'}\n - {name: train-from-csv-model, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}\'}\n - {name: train-from-csv-signature, from: \'{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}\'}\n - name: train-from-csv\n container:\n args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,\n --kernel, \'{{inputs.parameters.kernel}}\', --model, /tmp/outputs/model/data,\n --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,\n --conda-env, /tmp/outputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\n def train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n ):\n import dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from mlflow.models.signature import infer_signature\n from mlflow.utils.environment import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data, train_target)\n\n with open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path, "wb") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path, "wb") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env = _mlflow_conda_env(\n additional_pip_deps=["dill", "pandas", "scikit-learn"]\n )\n with open(conda_env_path, "wb") as file_writer:\n dill.dump(conda_env, file_writer)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Train from csv\', description=\'\')\n _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", 
dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = train_from_csv(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: kernel}\n artifacts:\n - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}\n - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}\n outputs:\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/outputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",\n {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",\n {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},\n "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":\n "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\'\n \'\'scikit-learn\'\' \'\'mlflow\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m\n pip install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def _make_parent_dirs_and_return_path(file_path: str):\\n import os\\n os.makedirs(os.path.dirname(file_path),\n exist_ok=True)\\n return file_path\\n\\ndef train_from_csv(\\n train_data_path,\\n train_target_path,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n kernel,\\n):\\n import\n dill\\n import pandas as pd\\n from sklearn.svm import SVC\\n\\n from\n mlflow.models.signature import infer_signature\\n from mlflow.utils.environment\n import _mlflow_conda_env\\n\\n train_data = pd.read_csv(train_data_path)\\n train_target\n = pd.read_csv(train_target_path)\\n\\n clf = SVC(kernel=kernel)\\n clf.fit(train_data,\n train_target)\\n\\n with open(model_path, mode=\\"wb\\") as file_writer:\\n dill.dump(clf,\n file_writer)\\n\\n input_example = train_data.sample(1)\\n with open(input_example_path,\n \\"wb\\") as file_writer:\\n dill.dump(input_example, file_writer)\\n\\n signature\n = infer_signature(train_data, clf.predict(train_data))\\n with open(signature_path,\n \\"wb\\") as file_writer:\\n dill.dump(signature, file_writer)\\n\\n conda_env\n = _mlflow_conda_env(\\n additional_pip_deps=[\\"dill\\", \\"pandas\\",\n \\"scikit-learn\\"]\\n )\\n with open(conda_env_path, \\"wb\\") as file_writer:\\n dill.dump(conda_env,\n file_writer)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Train\n from csv\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--train-data\\", dest=\\"train_data_path\\",\n type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--train-target\\",\n dest=\\"train_target_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--kernel\\",\n dest=\\"kernel\\", type=str, required=True, 
default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\", dest=\\"input_example_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n dest=\\"signature_path\\", type=_make_parent_dirs_and_return_path, required=True,\n default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\", dest=\\"conda_env_path\\",\n type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = train_from_csv(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},\n {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],\n "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},\n {"name": "input_example", "type": "dill"}, {"name": "signature", "type":\n "dill"}, {"name": "conda_env", "type": "dill"}]}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"kernel": "{{inputs.parameters.kernel}}"}\'}\n - name: upload-sklearn-model-to-mlflow\n container:\n args: [--model-name, \'{{inputs.parameters.model_name}}\', --model, /tmp/inputs/model/data,\n --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,\n --conda-env, /tmp/inputs/conda_env/data]\n command:\n - sh\n - -c\n - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location\n \'dill\' \'pandas\' \'scikit-learn\' \'mlflow\' \'boto3\' || PIP_DISABLE_PIP_VERSION_CHECK=1\n python3 -m pip install --quiet --no-warn-script-location \'dill\' \'pandas\' \'scikit-learn\'\n \'mlflow\' \'boto3\' --user) && "$0" "$@"\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n ):\n import os\n import dill\n from mlflow.sklearn import save_model\n\n from mlflow.tracking.client import MlflowClient\n\n os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"\n os.environ["AWS_ACCESS_KEY_ID"] = "minio"\n os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"\n\n client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")\n\n with open(model_path, mode="rb") as file_reader:\n clf = dill.load(file_reader)\n\n with open(input_example_path, "rb") as file_reader:\n input_example = dill.load(file_reader)\n\n with open(signature_path, "rb") as file_reader:\n signature = dill.load(file_reader)\n\n with open(conda_env_path, "rb") as file_reader:\n conda_env = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format="cloudpickle",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run = client.create_run(experiment_id="0")\n client.log_artifact(run.info.run_id, model_name)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Upload sklearn model to mlflow\', description=\'\')\n _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--input-example", dest="input_example_path", type=str, 
required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: model_name}\n artifacts:\n - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}\n - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}\n - {name: train-from-csv-model, path: /tmp/inputs/model/data}\n - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}\n metadata:\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.10\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n annotations: {pipelines.kubeflow.org/component_spec: \'{"implementation": {"container":\n {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":\n "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",\n {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],\n "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip\n install --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\'\n \'\'mlflow\'\' \'\'boto3\'\' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install\n --quiet --no-warn-script-location \'\'dill\'\' \'\'pandas\'\' \'\'scikit-learn\'\' \'\'mlflow\'\'\n \'\'boto3\'\' --user) && \\"$0\\" \\"$@\\"", "sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def upload_sklearn_model_to_mlflow(\\n model_name,\\n model_path,\\n input_example_path,\\n signature_path,\\n conda_env_path,\\n):\\n import\n os\\n import dill\\n from mlflow.sklearn import save_model\\n\\n from\n mlflow.tracking.client import MlflowClient\\n\\n os.environ[\\"MLFLOW_S3_ENDPOINT_URL\\"]\n = \\"http://minio-service.kubeflow.svc:9000\\"\\n os.environ[\\"AWS_ACCESS_KEY_ID\\"]\n = \\"minio\\"\\n os.environ[\\"AWS_SECRET_ACCESS_KEY\\"] = \\"minio123\\"\\n\\n client\n = MlflowClient(\\"http://mlflow-server-service.mlflow-system.svc:5000\\")\\n\\n with\n open(model_path, mode=\\"rb\\") as file_reader:\\n clf = dill.load(file_reader)\\n\\n with\n open(input_example_path, \\"rb\\") as file_reader:\\n input_example\n = dill.load(file_reader)\\n\\n with open(signature_path, \\"rb\\") as file_reader:\\n signature\n = dill.load(file_reader)\\n\\n with open(conda_env_path, \\"rb\\") as file_reader:\\n conda_env\n = dill.load(file_reader)\\n\\n save_model(\\n sk_model=clf,\\n path=model_name,\\n serialization_format=\\"cloudpickle\\",\\n conda_env=conda_env,\\n signature=signature,\\n input_example=input_example,\\n )\\n run\n = client.create_run(experiment_id=\\"0\\")\\n client.log_artifact(run.info.run_id,\n model_name)\\n\\nimport argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Upload\n sklearn model to mlflow\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--model-name\\",\n dest=\\"model_name\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--model\\",\n dest=\\"model_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--input-example\\",\n dest=\\"input_example_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--signature\\",\n 
dest=\\"signature_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--conda-env\\",\n dest=\\"conda_env_path\\", type=str, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},\n {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},\n {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],\n "name": "Upload sklearn model to mlflow"}\', pipelines.kubeflow.org/component_ref: \'{}\',\n pipelines.kubeflow.org/arguments.parameters: \'{"model_name": "{{inputs.parameters.model_name}}"}\'}\n arguments:\n parameters:\n - {name: kernel}\n - {name: model_name}\n serviceAccountName: pipeline-runner\n')))),(0,r.kt)("p",null,"After generating the mlflow_pipeline.yaml file after execution, upload the pipeline and execute it to check the results of the run."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-0",src:a(5041).Z,width:"3408",height:"2156"})),(0,r.kt)("p",null,"Port-forward the mlflow service to access the MLflow UI."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000\n")),(0,r.kt)("p",null,"Open the web browser and connect to localhost:5000. You will then be able to see that the run has been created as follows."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-1",src:a(1757).Z,width:"3360",height:"2100"})),(0,r.kt)("p",null,"Click on run to verify that the trained model file is present."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"mlflow-svc-2",src:a(1874).Z,width:"3360",height:"2100"})))}_.isMDXComponent=!0},8730:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-0-95d5ec759ef43b21c9c3b22abb64366d.png"},6694:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-1-a096f3eda2246a1c132fc13ce3180ef5.png"},5944:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-2-3cd7cf7e2c853a1242cff7c65e56cf3f.png"},5109:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-3-8b187057bb18f27b1744656ef6d045a1.png"},5041:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-0-ab6c5d7f00bf643c36d236155dc5eb9c.png"},1757:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-1-7723b8f92fb8cea2ff99b8f4639ff0c6.png"},1874:(e,n,a)=>{a.d(n,{Z:()=>t});const t=a.p+"assets/images/mlflow-svc-2-8b696bd65a922f949877102bbfdafc42.png"}}]); \ No newline at end of file diff --git a/en/assets/js/d1b5315b.ee12d5ba.js b/en/assets/js/d1b5315b.2f2dfd89.js similarity index 98% rename from en/assets/js/d1b5315b.ee12d5ba.js rename to en/assets/js/d1b5315b.2f2dfd89.js index a6aba631..3f6f972a 100644 --- a/en/assets/js/d1b5315b.ee12d5ba.js +++ b/en/assets/js/d1b5315b.2f2dfd89.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9680],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var 
o=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=i.createContext({}),d=function(e){var t=i.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return i.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=d(n),m=a,f=u["".concat(s,".").concat(m)]||u[m]||c[m]||o;return n?i.createElement(f,r(r({ref:t},p),{},{components:n})):i.createElement(f,r({ref:t},p))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,r=new Array(o);r[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:a,r[1]=l;for(var d=2;d{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>d});var i=n(7462),a=(n(7294),n(3905));const o={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},r=void 0,l={unversionedId:"introduction/component",id:"version-1.0/introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/en/docs/1.0/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. Levels of MLOps",permalink:"/en/docs/1.0/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/en/docs/1.0/introduction/why_kubernetes"}},s={},d=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model Training",id:"3-model-training",level:3},{value:"4. Model Evaluation",id:"4-model-evaluation",level:3},{value:"5. Model Serving",id:"5-model-serving",level:3},{value:"6. Online Experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. 
ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],p={toc:d},u="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(u,(0,i.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,a.kt)("p",null,"Google's white paper ","[Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning]"," published in May 2021 mentions the following core functionalities of MLOps: "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlops-component",src:n(9324).Z,width:"2352",height:"1890"})),(0,a.kt)("p",null,"Let's look at what each feature does."),(0,a.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,a.kt)("p",null,"Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Integration with version control tools like Git and a notebook (Jupyter Notebook) environment"),(0,a.kt)("li",{parentName:"ul"},"Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics"),(0,a.kt)("li",{parentName:"ul"},"Data and model analysis and visualization capabilities")),(0,a.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,a.kt)("p",null,"Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Data connectors compatible with various data sources and services"),(0,a.kt)("li",{parentName:"ul"},"Data encoders and decoders compatible with different data formats"),(0,a.kt)("li",{parentName:"ul"},"Data transformation and feature engineering capabilities for different data types"),(0,a.kt)("li",{parentName:"ul"},"Scalable batch and streaming data processing capabilities for training and serving")),(0,a.kt)("h3",{id:"3-model-training"},"3. Model Training"),(0,a.kt)("p",null,"Model Training offers functionalities to efficiently execute algorithms for model training:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Environment provisioning for ML framework execution"),(0,a.kt)("li",{parentName:"ul"},"Distributed training environment for multiple GPUs and distributed training"),(0,a.kt)("li",{parentName:"ul"},"Hyperparameter tuning and optimization capabilities")),(0,a.kt)("h3",{id:"4-model-evaluation"},"4. Model Evaluation"),(0,a.kt)("p",null,"Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model performance evaluation on evaluation datasets"),(0,a.kt)("li",{parentName:"ul"},"Tracking prediction performance across different continuous training runs"),(0,a.kt)("li",{parentName:"ul"},"Comparison and visualization of performance between different models"),(0,a.kt)("li",{parentName:"ul"},"Model output interpretation using interpretable AI techniques")),(0,a.kt)("h3",{id:"5-model-serving"},"5. 
Model Serving"),(0,a.kt)("p",null,"Model serving offers functionalities to deploy and serve models in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Low-latency and high-availability inference capabilities"),(0,a.kt)("li",{parentName:"ul"},"Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)"),(0,a.kt)("li",{parentName:"ul"},"Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results"),(0,a.kt)("li",{parentName:"ul"},"Autoscaling capabilities to handle spiking inference requests"),(0,a.kt)("li",{parentName:"ul"},"Logging of inference requests and results")),(0,a.kt)("h3",{id:"6-online-experimentation"},"6. Online Experimentation"),(0,a.kt)("p",null,"Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Canary and shadow deployment features"),(0,a.kt)("li",{parentName:"ul"},"A/B testing capabilities"),(0,a.kt)("li",{parentName:"ul"},"Multi-armed bandit testing functionality")),(0,a.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,a.kt)("p",null,"Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates."),(0,a.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,a.kt)("p",null,"ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Pipeline execution through various event sources"),(0,a.kt)("li",{parentName:"ul"},"ML metadata tracking and integration for pipeline parameter and artifact management"),(0,a.kt)("li",{parentName:"ul"},"Support for built-in components for common ML tasks and user-defined components"),(0,a.kt)("li",{parentName:"ul"},"Provisioning of different execution environments")),(0,a.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,a.kt)("p",null,"The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Registration, tracking, and versioning of trained and deployed models"),(0,a.kt)("li",{parentName:"ul"},"Storage of information about the required data and runtime packages for deployment")),(0,a.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. Dataset and Feature Repository"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Sharing, search, reuse, and versioning capabilities for datasets"),(0,a.kt)("li",{parentName:"ul"},"Real-time processing and low-latency serving capabilities for event streaming and online inference tasks"),(0,a.kt)("li",{parentName:"ul"},"Support for various types of data, such as images, text, and tabular data")),(0,a.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,a.kt)("p",null,"In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. 
ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"History management for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Tracking and sharing of experiments and pipeline parameter configurations"),(0,a.kt)("li",{parentName:"ul"},"Storage, access, visualization, and download capabilities for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Integration with other MLOps functionalities")))}c.isMDXComponent=!0},9324:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[9680],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>f});var i=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);t&&(i=i.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,i)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(i=0;i=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=i.createContext({}),d=function(e){var t=i.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return i.createElement(s.Provider,{value:t},e.children)},u="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return i.createElement(i.Fragment,{},t)}},m=i.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),u=d(n),m=a,f=u["".concat(s,".").concat(m)]||u[m]||c[m]||o;return n?i.createElement(f,r(r({ref:t},p),{},{components:n})):i.createElement(f,r({ref:t},p))}));function f(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,r=new Array(o);r[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[u]="string"==typeof e?e:a,r[1]=l;for(var d=2;d{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>d});var i=n(7462),a=(n(7294),n(3905));const o={title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Youngcheol Jang"]},r=void 0,l={unversionedId:"introduction/component",id:"version-1.0/introduction/component",title:"3. Components of MLOps",description:"Describe MLOps Components",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/introduction/component.md",sourceDirName:"introduction",slug:"/introduction/component",permalink:"/en/docs/1.0/introduction/component",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/introduction/component.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Components of MLOps",description:"Describe MLOps Components",sidebar_position:3,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Youngcheol Jang"]},sidebar:"tutorialSidebar",previous:{title:"2. 
Levels of MLOps",permalink:"/en/docs/1.0/introduction/levels"},next:{title:"4. Why Kubernetes?",permalink:"/en/docs/1.0/introduction/why_kubernetes"}},s={},d=[{value:"Practitioners guide to MLOps",id:"practitioners-guide-to-mlops",level:2},{value:"1. Experimentation",id:"1-experimentation",level:3},{value:"2. Data Processing",id:"2-data-processing",level:3},{value:"3. Model Training",id:"3-model-training",level:3},{value:"4. Model Evaluation",id:"4-model-evaluation",level:3},{value:"5. Model Serving",id:"5-model-serving",level:3},{value:"6. Online Experimentation",id:"6-online-experimentation",level:3},{value:"7. Model Monitoring",id:"7-model-monitoring",level:3},{value:"8. ML Pipeline",id:"8-ml-pipeline",level:3},{value:"9. Model Registry",id:"9-model-registry",level:3},{value:"10. Dataset and Feature Repository",id:"10-dataset-and-feature-repository",level:3},{value:"11. ML Metadata and Artifact Tracking",id:"11-ml-metadata-and-artifact-tracking",level:3}],p={toc:d},u="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(u,(0,i.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"practitioners-guide-to-mlops"},"Practitioners guide to MLOps"),(0,a.kt)("p",null,"Google's white paper ","[Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning]"," published in May 2021 mentions the following core functionalities of MLOps: "),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"mlops-component",src:n(9324).Z,width:"2352",height:"1890"})),(0,a.kt)("p",null,"Let's look at what each feature does."),(0,a.kt)("h3",{id:"1-experimentation"},"1. Experimentation"),(0,a.kt)("p",null,"Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Integration with version control tools like Git and a notebook (Jupyter Notebook) environment"),(0,a.kt)("li",{parentName:"ul"},"Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics"),(0,a.kt)("li",{parentName:"ul"},"Data and model analysis and visualization capabilities")),(0,a.kt)("h3",{id:"2-data-processing"},"2. Data Processing"),(0,a.kt)("p",null,"Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Data connectors compatible with various data sources and services"),(0,a.kt)("li",{parentName:"ul"},"Data encoders and decoders compatible with different data formats"),(0,a.kt)("li",{parentName:"ul"},"Data transformation and feature engineering capabilities for different data types"),(0,a.kt)("li",{parentName:"ul"},"Scalable batch and streaming data processing capabilities for training and serving")),(0,a.kt)("h3",{id:"3-model-training"},"3. Model Training"),(0,a.kt)("p",null,"Model Training offers functionalities to efficiently execute algorithms for model training:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Environment provisioning for ML framework execution"),(0,a.kt)("li",{parentName:"ul"},"Distributed training environment for multiple GPUs and distributed training"),(0,a.kt)("li",{parentName:"ul"},"Hyperparameter tuning and optimization capabilities")),(0,a.kt)("h3",{id:"4-model-evaluation"},"4. 
Model Evaluation"),(0,a.kt)("p",null,"Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model performance evaluation on evaluation datasets"),(0,a.kt)("li",{parentName:"ul"},"Tracking prediction performance across different continuous training runs"),(0,a.kt)("li",{parentName:"ul"},"Comparison and visualization of performance between different models"),(0,a.kt)("li",{parentName:"ul"},"Model output interpretation using interpretable AI techniques")),(0,a.kt)("h3",{id:"5-model-serving"},"5. Model Serving"),(0,a.kt)("p",null,"Model serving offers functionalities to deploy and serve models in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Low-latency and high-availability inference capabilities"),(0,a.kt)("li",{parentName:"ul"},"Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)"),(0,a.kt)("li",{parentName:"ul"},"Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results"),(0,a.kt)("li",{parentName:"ul"},"Autoscaling capabilities to handle spiking inference requests"),(0,a.kt)("li",{parentName:"ul"},"Logging of inference requests and results")),(0,a.kt)("h3",{id:"6-online-experimentation"},"6. Online Experimentation"),(0,a.kt)("p",null,"Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Canary and shadow deployment features"),(0,a.kt)("li",{parentName:"ul"},"A/B testing capabilities"),(0,a.kt)("li",{parentName:"ul"},"Multi-armed bandit testing functionality")),(0,a.kt)("h3",{id:"7-model-monitoring"},"7. Model Monitoring"),(0,a.kt)("p",null,"Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates."),(0,a.kt)("h3",{id:"8-ml-pipeline"},"8. ML Pipeline"),(0,a.kt)("p",null,"ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Pipeline execution through various event sources"),(0,a.kt)("li",{parentName:"ul"},"ML metadata tracking and integration for pipeline parameter and artifact management"),(0,a.kt)("li",{parentName:"ul"},"Support for built-in components for common ML tasks and user-defined components"),(0,a.kt)("li",{parentName:"ul"},"Provisioning of different execution environments")),(0,a.kt)("h3",{id:"9-model-registry"},"9. Model Registry"),(0,a.kt)("p",null,"The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Registration, tracking, and versioning of trained and deployed models"),(0,a.kt)("li",{parentName:"ul"},"Storage of information about the required data and runtime packages for deployment")),(0,a.kt)("h3",{id:"10-dataset-and-feature-repository"},"10. 
Dataset and Feature Repository"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Sharing, search, reuse, and versioning capabilities for datasets"),(0,a.kt)("li",{parentName:"ul"},"Real-time processing and low-latency serving capabilities for event streaming and online inference tasks"),(0,a.kt)("li",{parentName:"ul"},"Support for various types of data, such as images, text, and tabular data")),(0,a.kt)("h3",{id:"11-ml-metadata-and-artifact-tracking"},"11. ML Metadata and Artifact Tracking"),(0,a.kt)("p",null,"In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"History management for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Tracking and sharing of experiments and pipeline parameter configurations"),(0,a.kt)("li",{parentName:"ul"},"Storage, access, visualization, and download capabilities for ML artifacts"),(0,a.kt)("li",{parentName:"ul"},"Integration with other MLOps functionalities")))}c.isMDXComponent=!0},9324:(e,t,n)=>{n.d(t,{Z:()=>i});const i=n.p+"assets/images/mlops-component-540cce1f22f97807b54c5e0dd1fec01e.png"}}]); \ No newline at end of file diff --git a/en/assets/js/d6cfd461.a151f46b.js b/en/assets/js/d6cfd461.8d27caef.js similarity index 99% rename from en/assets/js/d6cfd461.a151f46b.js rename to en/assets/js/d6cfd461.8d27caef.js index 9b4ab29a..a31326f7 100644 --- a/en/assets/js/d6cfd461.a151f46b.js +++ b/en/assets/js/d6cfd461.8d27caef.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4600],{3905:(e,n,t)=>{t.d(n,{Zo:()=>o,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),m=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},o=function(e){var n=m(e.components);return r.createElement(s.Provider,{value:n},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,o=i(e,["components","mdxType","originalType","parentName"]),_=m(t),d=a,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||p;return t?r.createElement(b,u(u({ref:n},o),{},{components:t})):r.createElement(b,u({ref:n},o))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[_]="string"==typeof e?e:a,u[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>p,metadata:()=>i,toc:()=>m});var r=t(7462),a=(t(7294),t(3905));const p={title:"10. 
Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"version-1.0/kubeflow/advanced-pipeline",title:"10. Pipeline - Setting",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment"},next:{title:"11. Pipeline - Run Result",permalink:"/en/docs/1.0/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],o={toc:m},_="wrapper";function l(e){let{components:n,...p}=e;return(0,a.kt)(_,(0,r.Z)({},o,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,a.kt)("p",null,"In this page, we will look at values that can be set in the pipeline."),(0,a.kt)("h2",{id:"display-name"},"Display Name"),(0,a.kt)("p",null,"Created within the pipeline, components have two names:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"task_name: the function name when writing the component"),(0,a.kt)("li",{parentName:"ul"},"display_name: the name that appears in the kubeflow UI")),(0,a.kt)("p",null,"For example, in the case where both components are set to Print and return number, it is difficult to tell which component is 1 or 2."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"run-7",src:t(510).Z,width:"3408",height:"2156"})),(0,a.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,a.kt)("p",null,"The solution for this is the display_name.",(0,a.kt)("br",{parentName:"p"}),"\n","We can set the display_name in the pipeline by using the set_display_name ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute")," of the component."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == 
"__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you run this script and check the resulting ",(0,a.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),", it would be like this."),(0,a.kt)("p",null,(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is 
number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: 
/tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u 
\\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,a.kt)("p",null,"If compared with the previous file, the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"pipelines.kubeflow.org/task_display_name"))," key has been newly created."),(0,a.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,a.kt)("p",null,"We will upload the version of the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"pipeline")," using the files we created earlier."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"adv-pipeline-0.png",src:t(6562).Z,width:"3360",height:"2100"})),(0,a.kt)("p",null,"As you can see, the configured name is displayed as shown above."),(0,a.kt)("h2",{id:"resources"},"Resources"),(0,a.kt)("h3",{id:"gpu"},"GPU"),(0,a.kt)("p",null,"By default, when the pipeline runs components as Kubernetes pods, it uses the default resource specifications.",(0,a.kt)("br",{parentName:"p"}),"\n","If you need to train a model using a GPU and the Kubernetes environment doesn't allocate a GPU, the training may not be performed correctly.",(0,a.kt)("br",{parentName:"p"}),"\n","To address this, you can use the ",(0,a.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute")," to set the GPU limit."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == 
"__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you execute the above script, you can see that the resources has been added with ",(0,a.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," in the generated file when you look closely at ",(0,a.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),".\nThrough this, you can allocate a GPU."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,a.kt)("h3",{id:"cpu"},"CPU"),(0,a.kt)("p",null,"The function to set the number of CPUs can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," attribute ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),".",(0,a.kt)("br",{parentName:"p"}),"\n","The difference from GPUs is that the input must be a string, not an int."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The changed part only can be confirmed as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,a.kt)("h3",{id:"memory"},"Memory"),(0,a.kt)("p",null,"Memory can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import 
create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,a.kt)("p",null,"The changed parts are as follows if checked."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},6562:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},510:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4600],{3905:(e,n,t)=>{t.d(n,{Zo:()=>o,kt:()=>b});var r=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function p(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function u(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=r.createContext({}),m=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):u(u({},n),e)),t},o=function(e){var n=m(e.components);return r.createElement(s.Provider,{value:n},e.children)},_="mdxType",l={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},d=r.forwardRef((function(e,n){var t=e.components,a=e.mdxType,p=e.originalType,s=e.parentName,o=i(e,["components","mdxType","originalType","parentName"]),_=m(t),d=a,b=_["".concat(s,".").concat(d)]||_[d]||l[d]||p;return t?r.createElement(b,u(u({ref:n},o),{},{components:t})):r.createElement(b,u({ref:n},o))}));function b(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var p=t.length,u=new Array(p);u[0]=d;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[_]="string"==typeof e?e:a,u[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>u,default:()=>l,frontMatter:()=>p,metadata:()=>i,toc:()=>m});var r=t(7462),a=(t(7294),t(3905));const p={title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},u=void 0,i={unversionedId:"kubeflow/advanced-pipeline",id:"version-1.0/kubeflow/advanced-pipeline",title:"10. 
Pipeline - Setting",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/advanced-pipeline.md",sourceDirName:"kubeflow",slug:"/kubeflow/advanced-pipeline",permalink:"/en/docs/1.0/kubeflow/advanced-pipeline",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/advanced-pipeline.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:10,frontMatter:{title:"10. Pipeline - Setting",description:"",sidebar_position:10,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"9. Component - Environment",permalink:"/en/docs/1.0/kubeflow/advanced-environment"},next:{title:"11. Pipeline - Run Result",permalink:"/en/docs/1.0/kubeflow/advanced-run"}},s={},m=[{value:"Pipeline Setting",id:"pipeline-setting",level:2},{value:"Display Name",id:"display-name",level:2},{value:"set_display_name",id:"set_display_name",level:3},{value:"UI in Kubeflow",id:"ui-in-kubeflow",level:3},{value:"Resources",id:"resources",level:2},{value:"GPU",id:"gpu",level:3},{value:"CPU",id:"cpu",level:3},{value:"Memory",id:"memory",level:3}],o={toc:m},_="wrapper";function l(e){let{components:n,...p}=e;return(0,a.kt)(_,(0,r.Z)({},o,p,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"pipeline-setting"},"Pipeline Setting"),(0,a.kt)("p",null,"In this page, we will look at values that can be set in the pipeline."),(0,a.kt)("h2",{id:"display-name"},"Display Name"),(0,a.kt)("p",null,"Created within the pipeline, components have two names:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"task_name: the function name when writing the component"),(0,a.kt)("li",{parentName:"ul"},"display_name: the name that appears in the kubeflow UI")),(0,a.kt)("p",null,"For example, in the case where both components are set to Print and return number, it is difficult to tell which component is 1 or 2."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"run-7",src:t(510).Z,width:"3408",height:"2156"})),(0,a.kt)("h3",{id:"set_display_name"},"set_display_name"),(0,a.kt)("p",null,"The solution for this is the display_name.",(0,a.kt)("br",{parentName:"p"}),"\n","We can set the display_name in the pipeline by using the set_display_name ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html#kfp.dsl.ContainerOp.set_display_name"},"attribute")," of the component."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you run this script and check the resulting 
",(0,a.kt)("inlineCode",{parentName:"p"},"example_pipeline.yaml"),", it would be like this."),(0,a.kt)("p",null,(0,a.kt)("details",null,(0,a.kt)("summary",null,"example_pipeline.yaml"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: argoproj.io/v1alpha1\nkind: Workflow\nmetadata:\n generateName: example-pipeline-\n annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: \'2021-12-09T18:11:43.193190\',\n pipelines.kubeflow.org/pipeline_spec: \'{"inputs": [{"name": "number_1", "type":\n "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}\'}\n labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}\nspec:\n entrypoint: example-pipeline\n templates:\n - name: example-pipeline\n inputs:\n parameters:\n - {name: number_1}\n - {name: number_2}\n dag:\n tasks:\n - name: print-and-return-number\n template: print-and-return-number\n arguments:\n parameters:\n - {name: number_1, value: \'{{inputs.parameters.number_1}}\'}\n - name: print-and-return-number-2\n template: print-and-return-number-2\n arguments:\n parameters:\n - {name: number_2, value: \'{{inputs.parameters.number_2}}\'}\n - name: sum-and-print-numbers\n template: sum-and-print-numbers\n dependencies: [print-and-return-number, print-and-return-number-2]\n arguments:\n parameters:\n - {name: print-and-return-number-2-Output, value: \'{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}\'}\n - {name: print-and-return-number-Output, value: \'{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}\'}\n - name: print-and-return-number\n container:\n args: [--number, \'{{inputs.parameters.number_1}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_1}\n outputs:\n parameters:\n - name: print-and-return-number-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is number 1, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n 
{"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_1}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: print-and-return-number-2\n container:\n args: [--number, \'{{inputs.parameters.number_2}}\', \'----output-paths\', /tmp/outputs/Output/data]\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def print_and_return_number(number):\n print(number)\n return number\n\n def _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return int_value\n if not isinstance(int_value, int):\n raise TypeError(\'Value "{}" has type "{}" instead of int.\'.format(\n str(int_value), str(type(int_value))))\n return str(int_value)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Print and return number\', description=\'\')\n _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)\n _parsed_args = vars(_parser.parse_args())\n _output_files = _parsed_args.pop("_output_paths", [])\n\n _outputs = print_and_return_number(**_parsed_args)\n\n _outputs = [_outputs]\n\n _output_serializers = [\n _serialize_int,\n\n ]\n\n import os\n for idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, \'w\') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n image: python:3.7\n inputs:\n parameters:\n - {name: number_2}\n outputs:\n parameters:\n - name: print-and-return-number-2-Output\n valueFrom: {path: /tmp/outputs/Output/data}\n artifacts:\n - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}\n metadata:\n annotations: 
{pipelines.kubeflow.org/task_display_name: This is number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number", {"inputValue": "number"}, "----output-paths",\n {"outputPath": "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def print_and_return_number(number):\\n print(number)\\n return number\\n\\ndef\n _serialize_int(int_value: int) -> str:\\n if isinstance(int_value, str):\\n return\n int_value\\n if not isinstance(int_value, int):\\n raise TypeError(\'\'Value\n \\"{}\\" has type \\"{}\\" instead of int.\'\'.format(\\n str(int_value),\n str(type(int_value))))\\n return str(int_value)\\n\\nimport argparse\\n_parser\n = argparse.ArgumentParser(prog=\'\'Print and return number\'\', description=\'\'\'\')\\n_parser.add_argument(\\"--number\\",\n dest=\\"number\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"----output-paths\\",\n dest=\\"_output_paths\\", type=str, nargs=1)\\n_parsed_args = vars(_parser.parse_args())\\n_output_files\n = _parsed_args.pop(\\"_output_paths\\", [])\\n\\n_outputs = print_and_return_number(**_parsed_args)\\n\\n_outputs\n = [_outputs]\\n\\n_output_serializers = [\\n _serialize_int,\\n\\n]\\n\\nimport\n os\\nfor idx, output_file in enumerate(_output_files):\\n try:\\n os.makedirs(os.path.dirname(output_file))\\n except\n OSError:\\n pass\\n with open(output_file, \'\'w\'\') as f:\\n f.write(_output_serializers[idx](_outputs[idx]))\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],\n "name": "Print and return number", "outputs": [{"name": "Output", "type":\n "Integer"}]}\', pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number":\n "{{inputs.parameters.number_2}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n inputs:\n parameters:\n - {name: print-and-return-number-2-Output}\n - {name: print-and-return-number-Output}\n metadata:\n annotations: {pipelines.kubeflow.org/task_display_name: This is sum of number\n 1 and number 2, pipelines.kubeflow.org/component_spec: \'{"implementation":\n {"container": {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2",\n {"inputValue": "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\\nprintf\n \\"%s\\" \\"$0\\" > \\"$program_path\\"\\npython3 -u \\"$program_path\\" \\"$@\\"\\n",\n "def sum_and_print_numbers(number_1, number_2):\\n print(number_1 + number_2)\\n\\nimport\n argparse\\n_parser = 
argparse.ArgumentParser(prog=\'\'Sum and print numbers\'\',\n description=\'\'\'\')\\n_parser.add_argument(\\"--number-1\\", dest=\\"number_1\\",\n type=int, required=True, default=argparse.SUPPRESS)\\n_parser.add_argument(\\"--number-2\\",\n dest=\\"number_2\\", type=int, required=True, default=argparse.SUPPRESS)\\n_parsed_args\n = vars(_parser.parse_args())\\n\\n_outputs = sum_and_print_numbers(**_parsed_args)\\n"],\n "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},\n {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}\',\n pipelines.kubeflow.org/component_ref: \'{}\', pipelines.kubeflow.org/arguments.parameters: \'{"number_1":\n "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}\'}\n labels:\n pipelines.kubeflow.org/kfp_sdk_version: 1.8.9\n pipelines.kubeflow.org/pipeline-sdk-type: kfp\n pipelines.kubeflow.org/enable_caching: "true"\n arguments:\n parameters:\n - {name: number_1}\n - {name: number_2}\n serviceAccountName: pipeline-runner\n')))),(0,a.kt)("p",null,"If compared with the previous file, the ",(0,a.kt)("strong",{parentName:"p"},(0,a.kt)("inlineCode",{parentName:"strong"},"pipelines.kubeflow.org/task_display_name"))," key has been newly created."),(0,a.kt)("h3",{id:"ui-in-kubeflow"},"UI in Kubeflow"),(0,a.kt)("p",null,"We will upload the version of the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-pipeline-upload#upload-pipeline-version"},"pipeline")," using the files we created earlier."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"adv-pipeline-0.png",src:t(6562).Z,width:"3360",height:"2100"})),(0,a.kt)("p",null,"As you can see, the configured name is displayed as shown above."),(0,a.kt)("h2",{id:"resources"},"Resources"),(0,a.kt)("h3",{id:"gpu"},"GPU"),(0,a.kt)("p",null,"By default, when the pipeline runs components as Kubernetes pods, it uses the default resource specifications.",(0,a.kt)("br",{parentName:"p"}),"\n","If you need to train a model using a GPU and the Kubernetes environment doesn't allocate a GPU, the training may not be performed correctly.",(0,a.kt)("br",{parentName:"p"}),"\n","To address this, you can use the ",(0,a.kt)("inlineCode",{parentName:"p"},"set_gpu_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.UserContainer.set_gpu_limit"},"attribute")," to set the GPU limit."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"If you execute the above script, you 
can see that the resources has been added with ",(0,a.kt)("inlineCode",{parentName:"p"},"{nvidia.com/gpu: 1}")," in the generated file when you look closely at ",(0,a.kt)("inlineCode",{parentName:"p"},"sum-and-print-numbers"),".\nThrough this, you can allocate a GPU."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},' - name: sum-and-print-numbers\n container:\n args: [--number-1, \'{{inputs.parameters.print-and-return-number-Output}}\', --number-2,\n \'{{inputs.parameters.print-and-return-number-2-Output}}\']\n command:\n - sh\n - -ec\n - |\n program_path=$(mktemp)\n printf "%s" "$0" > "$program_path"\n python3 -u "$program_path" "$@"\n - |\n def sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\n import argparse\n _parser = argparse.ArgumentParser(prog=\'Sum and print numbers\', description=\'\')\n _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)\n _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)\n _parsed_args = vars(_parser.parse_args())\n\n _outputs = sum_and_print_numbers(**_parsed_args)\n image: python:3.7\n resources:\n limits: {nvidia.com/gpu: 1}\n')),(0,a.kt)("h3",{id:"cpu"},"CPU"),(0,a.kt)("p",null,"The function to set the number of CPUs can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_cpu_limit()")," attribute ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_cpu_limit"},"attribute"),".",(0,a.kt)("br",{parentName:"p"}),"\n","The difference from GPUs is that the input must be a string, not an int."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n')),(0,a.kt)("p",null,"The changed part only can be confirmed as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, cpu: '16'}\n")),(0,a.kt)("h3",{id:"memory"},"Memory"),(0,a.kt)("p",null,"Memory can be set using the ",(0,a.kt)("inlineCode",{parentName:"p"},".set_memory_limit()")," ",(0,a.kt)("a",{parentName:"p",href:"https://kubeflow-pipelines.readthedocs.io/en/latest/source/kfp.dsl.html?highlight=set_gpu_limit#kfp.dsl.Sidecar.set_memory_limit"},"attribute"),"."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import kfp\nfrom kfp.components import create_component_from_func\nfrom kfp.dsl import pipeline\n\n\n@create_component_from_func\ndef print_and_return_number(number: int) -> int:\n 
print(number)\n return number\n\n\n@create_component_from_func\ndef sum_and_print_numbers(number_1: int, number_2: int):\n print(number_1 + number_2)\n\n\n@pipeline(name="example_pipeline")\ndef example_pipeline(number_1: int, number_2: int):\n number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")\n number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")\n sum_result = sum_and_print_numbers(\n number_1=number_1_result.output, number_2=number_2_result.output\n ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")\n\n\nif __name__ == "__main__":\n kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")\n\n')),(0,a.kt)("p",null,"The changed parts are as follows if checked."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"}," resources:\n limits: {nvidia.com/gpu: 1, memory: 1G}\n")))}l.isMDXComponent=!0},6562:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/adv-pipeline-0-16dd5e9fed2f2d5c4a1d1b683a7a144d.png"},510:(e,n,t)=>{t.d(n,{Z:()=>r});const r=t.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"}}]); \ No newline at end of file diff --git a/en/assets/js/d7dc9408.cbf64d96.js b/en/assets/js/d7dc9408.0ea13cc0.js similarity index 98% rename from en/assets/js/d7dc9408.cbf64d96.js rename to en/assets/js/d7dc9408.0ea13cc0.js index 4f7536e8..3f2d1187 100644 --- a/en/assets/js/d7dc9408.cbf64d96.js +++ b/en/assets/js/d7dc9408.0ea13cc0.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8225],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=u(n),f=o,d=p["".concat(s,".").concat(f)]||p[f]||m[f]||i;return n?r.createElement(d,a(a({ref:t},c),{},{components:n})):r.createElement(d,a({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"Community",sidebar_position:1},a=void 0,l={unversionedId:"community",id:"community",title:"Community",description:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 
\uc18c\uc2dd",source:"@site/community/community.md",sourceDirName:".",slug:"/community",permalink:"/en/community/community",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/community.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"Community",sidebar_position:1},sidebar:"tutorialSidebar",next:{title:"How to Contribute",permalink:"/en/community/how-to-contribute"}},s={},u=[{value:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd",level:3},{value:"Question",id:"question",level:3},{value:"Suggestion",id:"suggestion",level:3}],c={toc:u},p="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h3",{id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd"},(0,o.kt)("em",{parentName:"h3"},"\ubaa8\ub450\uc758 MLOps")," \ub9b4\ub9ac\uc988 \uc18c\uc2dd"),(0,o.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub098 \uc218\uc815\uc0ac\ud56d\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/announcements"},"Announcements"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"question"},"Question"),(0,o.kt)("p",null,"\ud504\ub85c\uc81d\ud2b8 \ub0b4\uc6a9\uacfc \uad00\ub828\ub41c \uad81\uae08\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/q-a"},"Q&A"),"\ub97c \ud1b5\ud574 \uc9c8\ubb38\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"suggestion"},"Suggestion"),(0,o.kt)("p",null,"\uc81c\uc548\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/ideas"},"Ideas"),"\ub97c \ud1b5\ud574 \uc81c\uc548\ud574 \uc8fc\uc2dc\uba74 \ub429\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[8225],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var r=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function a(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):a(a({},t),e)),n},c=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},p="mdxType",m={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),p=u(n),f=o,d=p["".concat(s,".").concat(f)]||p[f]||m[f]||i;return n?r.createElement(d,a(a({ref:t},c),{},{components:n})):r.createElement(d,a({ref:t},c))}));function d(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,a=new Array(i);a[0]=f;var l={};for(var s in 
t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:o,a[1]=l;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>a,default:()=>m,frontMatter:()=>i,metadata:()=>l,toc:()=>u});var r=n(7462),o=(n(7294),n(3905));const i={title:"Community",sidebar_position:1},a=void 0,l={unversionedId:"community",id:"community",title:"Community",description:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",source:"@site/community/community.md",sourceDirName:".",slug:"/community",permalink:"/en/community/community",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/community/community.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"Community",sidebar_position:1},sidebar:"tutorialSidebar",next:{title:"How to Contribute",permalink:"/en/community/how-to-contribute"}},s={},u=[{value:"\ubaa8\ub450\uc758 MLOps \ub9b4\ub9ac\uc988 \uc18c\uc2dd",id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd",level:3},{value:"Question",id:"question",level:3},{value:"Suggestion",id:"suggestion",level:3}],c={toc:u},p="wrapper";function m(e){let{components:t,...n}=e;return(0,o.kt)(p,(0,r.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h3",{id:"\ubaa8\ub450\uc758-mlops-\ub9b4\ub9ac\uc988-\uc18c\uc2dd"},(0,o.kt)("em",{parentName:"h3"},"\ubaa8\ub450\uc758 MLOps")," \ub9b4\ub9ac\uc988 \uc18c\uc2dd"),(0,o.kt)("p",null,"\uc0c8\ub85c\uc6b4 \ud3ec\uc2a4\ud2b8\ub098 \uc218\uc815\uc0ac\ud56d\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/announcements"},"Announcements"),"\uc5d0\uc11c \ud655\uc778\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"question"},"Question"),(0,o.kt)("p",null,"\ud504\ub85c\uc81d\ud2b8 \ub0b4\uc6a9\uacfc \uad00\ub828\ub41c \uad81\uae08\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/q-a"},"Q&A"),"\ub97c \ud1b5\ud574 \uc9c8\ubb38\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."),(0,o.kt)("h3",{id:"suggestion"},"Suggestion"),(0,o.kt)("p",null,"\uc81c\uc548\uc810\uc740 ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/mlops-for-all/mlops-for-all.github.io/discussions/categories/ideas"},"Ideas"),"\ub97c \ud1b5\ud574 \uc81c\uc548\ud574 \uc8fc\uc2dc\uba74 \ub429\ub2c8\ub2e4."))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/d93ec163.4f350bad.js b/en/assets/js/d93ec163.4baf1b1b.js similarity index 99% rename from en/assets/js/d93ec163.4f350bad.js rename to en/assets/js/d93ec163.4baf1b1b.js index e100e221..16498ef7 100644 --- a/en/assets/js/d93ec163.4f350bad.js +++ b/en/assets/js/d93ec163.4baf1b1b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6628],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=r.createContext({}),u=function(e){var 
t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=u(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,p=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),c=u(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var o={};for(var p in t)hasOwnProperty.call(t,p)&&(o[p]=t[p]);o.originalType=e,o[c]="string"==typeof e?e:i,l[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>o,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,o={unversionedId:"kubeflow/basic-run",id:"version-1.0/kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/en/docs/1.0/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/1.0/kubeflow/advanced-component"}},p={},u=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Select Create Run",id:"1-select-create-run",level:3},{value:"2. Select Experiment",id:"2-select-experiment",level:3},{value:"3. Enter Pipeline Config",id:"3-enter-pipeline-config",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"Now we will run the uploaded pipeline."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. Create Experiment"),(0,i.kt)("p",null,"Experiments in Kubeflow are units that logically manage runs executed within them."),(0,i.kt)("p",null,"When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"."),(0,i.kt)("p",null,"Experiments can be created via the Create Experiment button."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(3084).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. 
Name \uc785\ub825"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-1.png",src:n(4643).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-select-create-run"},"1. Select Create Run"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(8145).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-select-experiment"},"2. Select Experiment"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(5387).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(6769).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-enter-pipeline-config"},"3. Enter Pipeline Config"),(0,i.kt)("p",null,"Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for ",(0,i.kt)("inlineCode",{parentName:"p"},"number_1")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"number_2"),"."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(8022).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"Click the Start button after entering the values. The pipeline will start running."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(2371).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"The executed pipelines can be viewed in the Runs tab.\nClicking on a run provides detailed information related to the executed pipeline."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(5496).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Upon clicking, the following screen appears. Components that have not yet executed are displayed in gray."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(2262).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"When a component has completed execution, it is marked with a green checkmark."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(510).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"If we look at the last component, we can see that it has outputted the sum of the input values, which in this case is 8 (the sum of 3 and 5)."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(2776).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},3084:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},4643:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},6769:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},8145:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},8022:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},2371:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},5496:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},2262:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},510:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},2776:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},5387:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6628],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var r=n(7294);function 
i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var p=r.createContext({}),u=function(e){var t=r.useContext(p),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},s=function(e){var t=u(e.components);return r.createElement(p.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},f=r.forwardRef((function(e,t){var n=e.components,i=e.mdxType,a=e.originalType,p=e.parentName,s=o(e,["components","mdxType","originalType","parentName"]),c=u(n),f=i,m=c["".concat(p,".").concat(f)]||c[f]||d[f]||a;return n?r.createElement(m,l(l({ref:t},s),{},{components:n})):r.createElement(m,l({ref:t},s))}));function m(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var a=n.length,l=new Array(a);l[0]=f;var o={};for(var p in t)hasOwnProperty.call(t,p)&&(o[p]=t[p]);o.originalType=e,o[c]="string"==typeof e?e:i,l[1]=o;for(var u=2;u{n.r(t),n.d(t,{assets:()=>p,contentTitle:()=>l,default:()=>d,frontMatter:()=>a,metadata:()=>o,toc:()=>u});var r=n(7462),i=(n(7294),n(3905));const a={title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},l=void 0,o={unversionedId:"kubeflow/basic-run",id:"version-1.0/kubeflow/basic-run",title:"7. Pipeline - Run",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow/basic-run.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-run",permalink:"/en/docs/1.0/kubeflow/basic-run",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow/basic-run.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:7,frontMatter:{title:"7. Pipeline - Run",description:"",sidebar_position:7,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"6. Pipeline - Upload",permalink:"/en/docs/1.0/kubeflow/basic-pipeline-upload"},next:{title:"8. Component - InputPath/OutputPath",permalink:"/en/docs/1.0/kubeflow/advanced-component"}},p={},u=[{value:"Run Pipeline",id:"run-pipeline",level:2},{value:"Before Run",id:"before-run",level:2},{value:"1. Create Experiment",id:"1-create-experiment",level:3},{value:"2. Name \uc785\ub825",id:"2-name-\uc785\ub825",level:3},{value:"Run Pipeline",id:"run-pipeline-1",level:2},{value:"1. Select Create Run",id:"1-select-create-run",level:3},{value:"2. Select Experiment",id:"2-select-experiment",level:3},{value:"3. Enter Pipeline Config",id:"3-enter-pipeline-config",level:3},{value:"4. Start",id:"4-start",level:3},{value:"Run Result",id:"run-result",level:2}],s={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,i.kt)(c,(0,r.Z)({},s,a,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("h2",{id:"run-pipeline"},"Run Pipeline"),(0,i.kt)("p",null,"Now we will run the uploaded pipeline."),(0,i.kt)("h2",{id:"before-run"},"Before Run"),(0,i.kt)("h3",{id:"1-create-experiment"},"1. 
Create Experiment"),(0,i.kt)("p",null,"Experiments in Kubeflow are units that logically manage runs executed within them."),(0,i.kt)("p",null,"When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to ",(0,i.kt)("a",{parentName:"p",href:"/en/docs/1.0/kubeflow/basic-run#run-pipeline-1"},"Run Pipeline"),"."),(0,i.kt)("p",null,"Experiments can be created via the Create Experiment button."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-0.png",src:n(3084).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-name-\uc785\ub825"},"2. Name \uc785\ub825"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-1.png",src:n(4643).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-pipeline-1"},"Run Pipeline"),(0,i.kt)("h3",{id:"1-select-create-run"},"1. Select Create Run"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-2.png",src:n(8145).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"2-select-experiment"},"2. Select Experiment"),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-9.png",src:n(5387).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-10.png",src:n(6769).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"3-enter-pipeline-config"},"3. Enter Pipeline Config"),(0,i.kt)("p",null,"Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for ",(0,i.kt)("inlineCode",{parentName:"p"},"number_1")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"number_2"),"."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-3.png",src:n(8022).Z,width:"3360",height:"2100"})),(0,i.kt)("h3",{id:"4-start"},"4. Start"),(0,i.kt)("p",null,"Click the Start button after entering the values. The pipeline will start running."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-4.png",src:n(2371).Z,width:"3360",height:"2100"})),(0,i.kt)("h2",{id:"run-result"},"Run Result"),(0,i.kt)("p",null,"The executed pipelines can be viewed in the Runs tab.\nClicking on a run provides detailed information related to the executed pipeline."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-5.png",src:n(5496).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"Upon clicking, the following screen appears. 
Components that have not yet executed are displayed in gray."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-6.png",src:n(2262).Z,width:"3360",height:"2100"})),(0,i.kt)("p",null,"When a component has completed execution, it is marked with a green checkmark."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-7.png",src:n(510).Z,width:"3408",height:"2156"})),(0,i.kt)("p",null,"If we look at the last component, we can see that it has outputted the sum of the input values, which in this case is 8 (the sum of 3 and 5)."),(0,i.kt)("p",null,(0,i.kt)("img",{alt:"run-8.png",src:n(2776).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},3084:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-0-89a074cf253ad20e9315a21b2a3f0e9d.png"},4643:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-1-665e6047b848cee9383180a6a146a1a7.png"},6769:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-10-2177a6d36d33136d1b22445a2bfde87b.png"},8145:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-2-e1d4347b0c3974602d7f848dd39139a1.png"},8022:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-3-4d37c68448d8d5a8930ace230463e41e.png"},2371:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-4-b6f1160b622f53a449e9022b42a0969c.png"},5496:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-5-165361ea6e50ef9626ff848ca5901332.png"},2262:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-6-c0df9defda8fb66fd249cfe650168103.png"},510:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-7-53ba486fe934b320289bf98ddbf9a4b6.png"},2776:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-8-ffde114f1b8e8f33c58e40927a2d28c6.png"},5387:(e,t,n)=>{n.d(t,{Z:()=>r});const r=n.p+"assets/images/run-9-845cae1b0883fa77fb58717001557edb.png"}}]); \ No newline at end of file diff --git a/en/assets/js/d9523fd4.de07ede6.js b/en/assets/js/d9523fd4.1b6a6f0f.js similarity index 99% rename from en/assets/js/d9523fd4.de07ede6.js rename to en/assets/js/d9523fd4.1b6a6f0f.js index 28211151..b7b987f4 100644 --- a/en/assets/js/d9523fd4.de07ede6.js +++ b/en/assets/js/d9523fd4.1b6a6f0f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7298],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},c=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),m=u(n),c=a,h=m["".concat(s,".").concat(c)]||m[c]||d[c]||o;return n?r.createElement(h,l(l({ref:t},p),{},{components:n})):r.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new 
Array(o);l[0]=c;var i={};for(var s in t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},l=void 0,i={unversionedId:"setup-kubernetes/intro",id:"version-1.0/setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/en/docs/1.0/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/en/docs/1.0/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes"}},s={},u=[{value:"Build MLOps System",id:"build-mlops-system",level:2},{value:"Components",id:"components",level:2},{value:"Cluster",id:"cluster",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"Client",id:"client",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],p={toc:u},m="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(m,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"build-mlops-system"},"Build MLOps System"),(0,a.kt)("p",null,"The biggest barrier when studying MLOps is the difficulty of setting up and using an MLOps system. Using public cloud platforms like AWS or GCP, or commercial tools like Weights & Biases or neptune.ai, can be costly, and starting from scratch to build the entire environment can be overwhelming and confusing."),(0,a.kt)("p",null,"To address these challenges and help those who haven't been able to start with MLOps, ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will guide you on how to build and use an MLOps system from scratch, requiring only a desktop with Ubuntu installed."),(0,a.kt)("p",null,"For those who cannot prepare a Ubuntu desktop environment, use virtual machines to set up the environment."),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"If you are using Windows or an Intel-based Mac for the ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," practical exercises, you can prepare an Ubuntu desktop environment using virtual machine software such as VirtualBox or VMware. Please make sure to meet the recommended specifications when creating the virtual machine.\nHowever, for those using an M1 Mac, as of the date of writing (February 2022), VirtualBox and VMware are not available. 
(",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"Check if macOS apps are optimized for M1 Apple Silicon Mac"),")\nTherefore, if you are not using a cloud environment, you can install UTM, Virtual machines for Mac, to use virtual machines.\n(Purchasing and downloading software from the App Store is a form of donation-based payment. The free version is sufficient as it only differs in automatic updates.)\nThis virtual machine software supports the ",(0,a.kt)("em",{parentName:"p"},"Ubuntu 20.04.3 LTS")," practice operating system, enabling you to perform the exercises on an M1 Mac.")),(0,a.kt)("p",null,"However, since it is not possible to use all the elements described in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"Components of MLOps"),", ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will mainly focus on installing the representative open source software and connecting them to each other."),(0,a.kt)("p",null,"It is not meant that installing open source software in ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," is a standard, and we recommend choosing the appropriate tool that fits your situation."),(0,a.kt)("h2",{id:"components"},"Components"),(0,a.kt)("p",null,"The components of the MLOps system that we will make in this article and each version have been verified in the following environment."),(0,a.kt)("p",null,"To facilitate smooth testing, I will explain the setup of the ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,a.kt)("strong",{parentName:"p"},"Client")," as separate entities."),(0,a.kt)("p",null,"The ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," refers to a single desktop with Ubuntu installed.",(0,a.kt)("br",{parentName:"p"}),"\n","The ",(0,a.kt)("strong",{parentName:"p"},"Client")," is recommended to be a different desktop, such as a laptop or another desktop with access to the Cluster or Kubernetes installation. However, if you only have one machine available, you can use the same desktop for both Cluster and Client purposes."),(0,a.kt)("h3",{id:"cluster"},"Cluster"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"Below is the list of software that needs to be installed on the Cluster:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"Below is the list of third-party software that needs to be installed using Helm:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"client"},"Client"),(0,a.kt)("p",null,"The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU: 6 cores"),(0,a.kt)("li",{parentName:"ul"},"RAM: 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK: 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU: NVIDIA GPU (optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7298],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>h});var r=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function l(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=r.createContext({}),u=function(e){var t=r.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},p=function(e){var t=u(e.components);return r.createElement(s.Provider,{value:t},e.children)},m="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return r.createElement(r.Fragment,{},t)}},c=r.forwardRef((function(e,t){var n=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),m=u(n),c=a,h=m["".concat(s,".").concat(c)]||m[c]||d[c]||o;return n?r.createElement(h,l(l({ref:t},p),{},{components:n})):r.createElement(h,l({ref:t},p))}));function h(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=n.length,l=new Array(o);l[0]=c;var i={};for(var s in 
t)hasOwnProperty.call(t,s)&&(i[s]=t[s]);i.originalType=e,i[m]="string"==typeof e?e:a,l[1]=i;for(var u=2;u{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>o,metadata:()=>i,toc:()=>u});var r=n(7462),a=(n(7294),n(3905));const o={title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},l=void 0,i={unversionedId:"setup-kubernetes/intro",id:"version-1.0/setup-kubernetes/intro",title:"1. Introduction",description:"Setup Introduction",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-kubernetes/intro.md",sourceDirName:"setup-kubernetes",slug:"/setup-kubernetes/intro",permalink:"/en/docs/1.0/setup-kubernetes/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-kubernetes/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Introduction",description:"Setup Introduction",sidebar_position:1,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim","Jongsun Shinn","Youngdon Tae","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Why Kubernetes?",permalink:"/en/docs/1.0/introduction/why_kubernetes"},next:{title:"2. Setup Kubernetes",permalink:"/en/docs/1.0/setup-kubernetes/kubernetes"}},s={},u=[{value:"Build MLOps System",id:"build-mlops-system",level:2},{value:"Components",id:"components",level:2},{value:"Cluster",id:"cluster",level:3},{value:"1. Software",id:"1-software",level:4},{value:"2. Helm Chart",id:"2-helm-chart",level:4},{value:"Client",id:"client",level:3},{value:"Minimum System Requirements",id:"minimum-system-requirements",level:3}],p={toc:u},m="wrapper";function d(e){let{components:t,...n}=e;return(0,a.kt)(m,(0,r.Z)({},p,n,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"build-mlops-system"},"Build MLOps System"),(0,a.kt)("p",null,"The biggest barrier when studying MLOps is the difficulty of setting up and using an MLOps system. Using public cloud platforms like AWS or GCP, or commercial tools like Weights & Biases or neptune.ai, can be costly, and starting from scratch to build the entire environment can be overwhelming and confusing."),(0,a.kt)("p",null,"To address these challenges and help those who haven't been able to start with MLOps, ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will guide you on how to build and use an MLOps system from scratch, requiring only a desktop with Ubuntu installed."),(0,a.kt)("p",null,"For those who cannot prepare a Ubuntu desktop environment, use virtual machines to set up the environment."),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"If you are using Windows or an Intel-based Mac for the ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," practical exercises, you can prepare an Ubuntu desktop environment using virtual machine software such as VirtualBox or VMware. Please make sure to meet the recommended specifications when creating the virtual machine.\nHowever, for those using an M1 Mac, as of the date of writing (February 2022), VirtualBox and VMware are not available. 
(",(0,a.kt)("a",{parentName:"p",href:"https://isapplesiliconready.com/kr"},"Check if macOS apps are optimized for M1 Apple Silicon Mac"),")\nTherefore, if you are not using a cloud environment, you can install UTM, Virtual machines for Mac, to use virtual machines.\n(Purchasing and downloading software from the App Store is a form of donation-based payment. The free version is sufficient as it only differs in automatic updates.)\nThis virtual machine software supports the ",(0,a.kt)("em",{parentName:"p"},"Ubuntu 20.04.3 LTS")," practice operating system, enabling you to perform the exercises on an M1 Mac.")),(0,a.kt)("p",null,"However, since it is not possible to use all the elements described in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/introduction/component"},"Components of MLOps"),", ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," will mainly focus on installing the representative open source software and connecting them to each other."),(0,a.kt)("p",null,"It is not meant that installing open source software in ",(0,a.kt)("em",{parentName:"p"},"MLOps for ALL")," is a standard, and we recommend choosing the appropriate tool that fits your situation."),(0,a.kt)("h2",{id:"components"},"Components"),(0,a.kt)("p",null,"The components of the MLOps system that we will make in this article and each version have been verified in the following environment."),(0,a.kt)("p",null,"To facilitate smooth testing, I will explain the setup of the ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," and ",(0,a.kt)("strong",{parentName:"p"},"Client")," as separate entities."),(0,a.kt)("p",null,"The ",(0,a.kt)("strong",{parentName:"p"},"Cluster")," refers to a single desktop with Ubuntu installed.",(0,a.kt)("br",{parentName:"p"}),"\n","The ",(0,a.kt)("strong",{parentName:"p"},"Client")," is recommended to be a different desktop, such as a laptop or another desktop with access to the Cluster or Kubernetes installation. However, if you only have one machine available, you can use the same desktop for both Cluster and Client purposes."),(0,a.kt)("h3",{id:"cluster"},"Cluster"),(0,a.kt)("h4",{id:"1-software"},"1. Software"),(0,a.kt)("p",null,"Below is the list of software that needs to be installed on the Cluster:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Ubuntu"),(0,a.kt)("td",{parentName:"tr",align:null},"20.04.3 LTS")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Docker (Server)"),(0,a.kt)("td",{parentName:"tr",align:null},"20.10.11")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"NVIDIA Driver"),(0,a.kt)("td",{parentName:"tr",align:null},"470.86")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubernetes"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"Kubeflow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.4.0")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"MLFlow"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.0")))),(0,a.kt)("h4",{id:"2-helm-chart"},"2. 
Helm Chart"),(0,a.kt)("p",null,"Below is the list of third-party software that needs to be installed using Helm:"),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Helm Chart Repo Name"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"datawire/ambassador"),(0,a.kt)("td",{parentName:"tr",align:null},"6.9.3")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"seldonio/seldon-core-operator"),(0,a.kt)("td",{parentName:"tr",align:null},"1.11.2")))),(0,a.kt)("h3",{id:"client"},"Client"),(0,a.kt)("p",null,"The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04."),(0,a.kt)("table",null,(0,a.kt)("thead",{parentName:"table"},(0,a.kt)("tr",{parentName:"thead"},(0,a.kt)("th",{parentName:"tr",align:null},"Software"),(0,a.kt)("th",{parentName:"tr",align:null},"Version"))),(0,a.kt)("tbody",{parentName:"table"},(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kubectl"),(0,a.kt)("td",{parentName:"tr",align:null},"v1.21.7")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"helm"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.7.1")),(0,a.kt)("tr",{parentName:"tbody"},(0,a.kt)("td",{parentName:"tr",align:null},"kustomize"),(0,a.kt)("td",{parentName:"tr",align:null},"v3.10.0")))),(0,a.kt)("h3",{id:"minimum-system-requirements"},"Minimum System Requirements"),(0,a.kt)("p",null,"It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"CPU: 6 cores"),(0,a.kt)("li",{parentName:"ul"},"RAM: 12GB"),(0,a.kt)("li",{parentName:"ul"},"DISK: 50GB"),(0,a.kt)("li",{parentName:"ul"},"GPU: NVIDIA GPU (optional)")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/d9ba8899.c6bf45ce.js b/en/assets/js/d9ba8899.3b1633e9.js similarity index 99% rename from en/assets/js/d9ba8899.c6bf45ce.js rename to en/assets/js/d9ba8899.3b1633e9.js index df3ce733..7d77b246 100644 --- a/en/assets/js/d9ba8899.c6bf45ce.js +++ b/en/assets/js/d9ba8899.3b1633e9.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6780],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},d=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var 
n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||u[m]||o;return n?a.createElement(k,i(i({ref:t},d),{},{components:n})):a.createElement(k,i({ref:t},d))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:r,i[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/advanced",id:"version-1.0/prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/en/docs/1.0/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/advanced.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/en/docs/1.0/prerequisites/docker/images"}},s={},c=[{value:"Making a good Docker image",id:"making-a-good-docker-image",level:2},{value:"Considerations to make Docker image:",id:"considerations-to-make-docker-image",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Naming docker tag",id:"naming-docker-tag",level:3},{value:"ETC",id:"etc",level:3},{value:"Several options for docker run",id:"several-options-for-docker-run",level:2},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"Docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart policy",id:"docker-run-with-restart-policy",level:3},{value:"Running docker run as a background process",id:"running-docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],d={toc:c},p="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(p,(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"making-a-good-docker-image"},"Making a good Docker image"),(0,r.kt)("h3",{id:"considerations-to-make-docker-image"},"Considerations to make Docker image:"),(0,r.kt)("p",null,"When creating a Docker image using a Dockerfile, the ",(0,r.kt)("strong",{parentName:"p"},"order")," of the commands is important.",(0,r.kt)("br",{parentName:"p"}),"\n","This is because Docker images are composed of many Read-Only layers and when building the image, existing layers are ",(0,r.kt)("strong",{parentName:"p"},"cached")," and reused, so if you structure your Dockerfile 
with this in mind, you can ",(0,r.kt)("strong",{parentName:"p"},"reduce the build time"),"."),(0,r.kt)("p",null,"Each of the ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY")," commands in a Dockerfile are stored as one layer."),(0,r.kt)("p",null,"For example, if we have the following ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,r.kt)("p",null,"If you run the image built with the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash"),", it can be represented in the following layers. "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"layers.png",src:n(4446).Z,width:"1080",height:"612"})),(0,r.kt)("p",null,"The topmost R/W layer does not affect the image. In other words, any changes made inside the container are volatile."),(0,r.kt)("p",null,"When a lower layer is changed, all the layers above it need to be rebuilt. Therefore, the order of Dockerfile instructions is important. It is recommended to place the parts that are frequently changed towards the end. (e.g., ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,r.kt)("p",null,"Conversely, parts that are unlikely to change should be placed towards the beginning."),(0,r.kt)("p",null,"If there are parts that are rarely changed but used in multiple places, they can be consolidated. It is advisable to create a separate image for those common parts in advance and use it as a base image."),(0,r.kt)("p",null,"For example, if you want to create separate images for an environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-cpu")," and another environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-gpu"),", you can do the following:\nCreate a base image ",(0,r.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,r.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," that includes Python and other basic packages installed. Then, when creating the images with the CPU and GPU versions of TensorFlow, you can use the base image as the ",(0,r.kt)("inlineCode",{parentName:"p"},"FROM")," instruction and write the separate instructions for installing TensorFlow in each Dockerfile. Managing two Dockerfiles in this way improves readability and reduces build time."),(0,r.kt)("p",null,"Combining layers had performance benefits in older versions of Docker. However, since you cannot guarantee the Docker version in which your Docker containers will run, it is recommended to combine layers for readability purposes. 
It is best to combine layers that can be combined appropriately."),(0,r.kt)("p",null,"Here is an example of a Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,r.kt)("p",null,"This can be written by combining it as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,r.kt)("p",null,"For convenience, it is better to use ",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore"),".",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore")," is similar to ",(0,r.kt)("inlineCode",{parentName:"p"},".gitignore")," in the sense that it can be excluded when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," just like when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"git add"),". "),(0,r.kt)("p",null,"More information can be found in the ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker Official Documentation"),"."),(0,r.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are both used when you want to execute a command at the runtime of the container. One of them must be present in the Dockerfile."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Difference"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD"),": Easily modifiable when running ",(0,r.kt)("inlineCode",{parentName:"li"},"docker run")," command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": Requires the use of ",(0,r.kt)("inlineCode",{parentName:"li"},"--entrypoint")," to modify")))),(0,r.kt)("p",null,"When ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are used together, ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," typically represents the arguments (parameters) for the command specified in ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT"),"."),(0,r.kt)("p",null,"For example, consider the following Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,r.kt)("p",null,"If you build and run the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the parts marked as comments deactivated, you can get the following results: 
"),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null}),(0,r.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"NO CMD")),(0,r.kt)("td",{parentName:"tr",align:null},"Error!"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,r.kt)("td",{parentName:"tr",align:null},"x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD x y")),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"In Kubernetes pod, ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," corresponds to the command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD")," corresponds to the arguments")))),(0,r.kt)("h3",{id:"naming-docker-tag"},"Naming docker tag"),(0,r.kt)("p",null,'Recommend not using "latest" as a tag for a Docker image, as it is the default tag name and can be easily overwritten unintentionally.'),(0,r.kt)("p",null,"It is important to ensure uniqueness of one image with one tag for the sake of collaboration and debugging in the production stage.",(0,r.kt)("br",{parentName:"p"}),"\n","Using the same tag for different contents can lead to dangling images, which are not shown in the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," but still take up storage space."),(0,r.kt)("h3",{id:"etc"},"ETC"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Logs and other information are stored separately from the container, not inside it.\nThis is because data written from within the container can be lost at any time."),(0,r.kt)("li",{parentName:"ol"},"Secrets and environment-dependent information should not be written directly into the Dockerfile but should be passed in via environment variables or a .env config file."),(0,r.kt)("li",{parentName:"ol"},"There is a ",(0,r.kt)("strong",{parentName:"li"},"linter")," for Dockerfiles, so it is useful to use it when collaborating.\n",(0,r.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,r.kt)("h2",{id:"several-options-for-docker-run"},"Several options for docker run"),(0,r.kt)("p",null,"When using Docker containers, there are some inconveniences.\nSpecifically, Docker does not store any of the work done within the Docker container by default.\nThis is because Docker containers use isolated file systems. 
Therefore, it is difficult to share data between multiple Docker containers."),(0,r.kt)("p",null,"To solve this problem, there are two approaches offered by Docker."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"storage.png",src:n(9288).Z,width:"501",height:"255"})),(0,r.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Use the Docker CLI to directly manage a resource called ",(0,r.kt)("inlineCode",{parentName:"li"},"volume"),"."),(0,r.kt)("li",{parentName:"ul"},"Create a specific directory under the Docker area (",(0,r.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") on the host and mount that path to a Docker container.")),(0,r.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Mount a specific path on the host to a Docker container.")),(0,r.kt)("h4",{id:"how-to-use"},"How to use?"),(0,r.kt)("p",null,"The usage is through the same interface, using the ",(0,r.kt)("inlineCode",{parentName:"p"},"-v")," option.",(0,r.kt)("br",{parentName:"p"}),"\n","However, when using volumes, you need to manage them directly by performing commands like ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume rm"),", etc."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Docker volume"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Blind mount"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,r.kt)("p",null,"When developing locally, bind mount can be convenient, but if you want to maintain a clean environment, using Docker volume and explicitly performing create and rm operations can be another approach."),(0,r.kt)("p",null,"The way storage is provided in Kubernetes ultimately relies on Docker's bind mount as well."),(0,r.kt)("h3",{id:"docker-run-with-resource-limit"},"Docker run with resource limit"),(0,r.kt)("p",null,"Basically, docker containers can ",(0,r.kt)("strong",{parentName:"p"},"fully utilize the CPU and memory resources of the host OS"),". 
However, when using this, depending on the resource situation of the host OS, docker containers may abnormally terminate due to issues such as ",(0,r.kt)("strong",{parentName:"p"},"OOM"),".\nTo address this problem, docker provides the ",(0,r.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"option")," which allows you to ",(0,r.kt)("strong",{parentName:"p"},"limit the usage of CPU and memory")," when running the docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,r.kt)("p",null,"After running the Docker above, you can check the usage through the 'docker stats' command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,r.kt)("p",null,"In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique."),(0,r.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,r.kt)("p",null,"If there is a need to keep a particular container running continuously, the ",(0,r.kt)("inlineCode",{parentName:"p"},"--restart=always")," option is provided to try to re-create the container immediately after it is terminated."),(0,r.kt)("p",null,"After entering the option, run the docker."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,r.kt)("p",null,"Run ",(0,r.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps")," to check if it is restarting.\nIf it is running normally, ",(0,r.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," will be printed in STATUS."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Provides options such as "on-failure with max retries" and "always"')))),(0,r.kt)("p",null,"When specifying the restart option for a job resource in Kubernetes, this approach is used."),(0,r.kt)("h3",{id:"running-docker-run-as-a-background-process"},"Running docker run as a background process"),(0,r.kt)("p",null,"By default, when running a Docker container, it is executed as a foreground process. This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands."),(0,r.kt)("p",null,"Let's try an example. 
Open two terminals, and in one terminal, continuously monitor ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", while in the other terminal, execute the following commands one by one and observe the behavior."),(0,r.kt)("h4",{id:"first-practice"},"First Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"You must remain stopped for 10 seconds and you cannot perform any other commands from that container. After 10 seconds, you can check in docker ps that the container has terminated."),(0,r.kt)("h4",{id:"second-practice"},"Second Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"After that, press ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + q"),"."),(0,r.kt)("p",null,"Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),'. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the ',(0,r.kt)("inlineCode",{parentName:"p"},"run")," command."),(0,r.kt)("h4",{id:"third-practice"},"Third Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,r.kt)("p",null,"In detached mode, you can perform other actions in the terminal that executed the command."),(0,r.kt)("p",null,"It is good to use detached mode appropriately according to the situation.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, when developing a backend API server that communicates with the DB, the backend API server needs to be constantly checked with hot-loading while changing the source code, but the DB does not need to be monitored, so it can be executed as follows.",(0,r.kt)("br",{parentName:"p"}),"\n","Run the DB container in detached mode, and run the backend API server in attached mode to follow the logs."),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}u.isMDXComponent=!0},4446:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},9288:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file +"use 
strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[6780],{3905:(e,t,n)=>{n.d(t,{Zo:()=>d,kt:()=>k});var a=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function o(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var s=a.createContext({}),c=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},d=function(e){var t=c(e.components);return a.createElement(s.Provider,{value:t},e.children)},p="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,d=l(e,["components","mdxType","originalType","parentName"]),p=c(n),m=r,k=p["".concat(s,".").concat(m)]||p[m]||u[m]||o;return n?a.createElement(k,i(i({ref:t},d),{},{components:n})):a.createElement(k,i({ref:t},d))}));function k(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var o=n.length,i=new Array(o);i[0]=m;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[p]="string"==typeof e?e:r,i[1]=l;for(var c=2;c{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>c});var a=n(7462),r=(n(7294),n(3905));const o={title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,l={unversionedId:"prerequisites/docker/advanced",id:"version-1.0/prerequisites/docker/advanced",title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/advanced.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/advanced",permalink:"/en/docs/1.0/prerequisites/docker/advanced",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/advanced.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"[Practice] Docker Advanced",description:"Practice to use docker more advanced way.",sidebar_position:6,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"[Practice] Docker images",permalink:"/en/docs/1.0/prerequisites/docker/images"}},s={},c=[{value:"Making a good Docker image",id:"making-a-good-docker-image",level:2},{value:"Considerations to make Docker image:",id:"considerations-to-make-docker-image",level:3},{value:"ENTRYPOINT vs CMD",id:"entrypoint-vs-cmd",level:3},{value:"Naming docker tag",id:"naming-docker-tag",level:3},{value:"ETC",id:"etc",level:3},{value:"Several options for docker run",id:"several-options-for-docker-run",level:2},{value:"Docker volume",id:"docker-volume",level:4},{value:"Bind mount",id:"bind-mount",level:4},{value:"How to use?",id:"how-to-use",level:4},{value:"Docker run with resource limit",id:"docker-run-with-resource-limit",level:3},{value:"docker run with restart 
policy",id:"docker-run-with-restart-policy",level:3},{value:"Running docker run as a background process",id:"running-docker-run-as-a-background-process",level:3},{value:"First Practice",id:"first-practice",level:4},{value:"Second Practice",id:"second-practice",level:4},{value:"Third Practice",id:"third-practice",level:4},{value:"References",id:"references",level:2}],d={toc:c},p="wrapper";function u(e){let{components:t,...o}=e;return(0,r.kt)(p,(0,a.Z)({},d,o,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"making-a-good-docker-image"},"Making a good Docker image"),(0,r.kt)("h3",{id:"considerations-to-make-docker-image"},"Considerations to make Docker image:"),(0,r.kt)("p",null,"When creating a Docker image using a Dockerfile, the ",(0,r.kt)("strong",{parentName:"p"},"order")," of the commands is important.",(0,r.kt)("br",{parentName:"p"}),"\n","This is because Docker images are composed of many Read-Only layers and when building the image, existing layers are ",(0,r.kt)("strong",{parentName:"p"},"cached")," and reused, so if you structure your Dockerfile with this in mind, you can ",(0,r.kt)("strong",{parentName:"p"},"reduce the build time"),"."),(0,r.kt)("p",null,"Each of the ",(0,r.kt)("inlineCode",{parentName:"p"},"RUN"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"ADD"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY")," commands in a Dockerfile are stored as one layer."),(0,r.kt)("p",null,"For example, if we have the following ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Layer 1\nFROM ubuntu:latest\n\n# Layer 2\nRUN apt-get update && apt-get install python3 pip3 -y\n\n# Layer 3\nRUN pip3 install -U pip && pip3 install torch\n\n# Layer 4\nCOPY src/ src/\n\n# Layer 5\nCMD python src/app.py\n")),(0,r.kt)("p",null,"If you run the image built with the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the command ",(0,r.kt)("inlineCode",{parentName:"p"},"docker run -it app:latest /bin/bash"),", it can be represented in the following layers. "),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"layers.png",src:n(4446).Z,width:"1080",height:"612"})),(0,r.kt)("p",null,"The topmost R/W layer does not affect the image. In other words, any changes made inside the container are volatile."),(0,r.kt)("p",null,"When a lower layer is changed, all the layers above it need to be rebuilt. Therefore, the order of Dockerfile instructions is important. It is recommended to place the parts that are frequently changed towards the end. (e.g., ",(0,r.kt)("inlineCode",{parentName:"p"},"COPY src/ app/src/"),")"),(0,r.kt)("p",null,"Conversely, parts that are unlikely to change should be placed towards the beginning."),(0,r.kt)("p",null,"If there are parts that are rarely changed but used in multiple places, they can be consolidated. It is advisable to create a separate image for those common parts in advance and use it as a base image."),(0,r.kt)("p",null,"For example, if you want to create separate images for an environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-cpu")," and another environment that uses ",(0,r.kt)("inlineCode",{parentName:"p"},"tensorflow-gpu"),", you can do the following:\nCreate a base image ",(0,r.kt)("a",{parentName:"p",href:"http://ghcr.io/makinarocks/python:3.8-base-cpu"},(0,r.kt)("inlineCode",{parentName:"a"},"ghcr.io/makinarocks/python:3.8-base"))," that includes Python and other basic packages installed. 
Then, when creating the images with the CPU and GPU versions of TensorFlow, you can use the base image as the ",(0,r.kt)("inlineCode",{parentName:"p"},"FROM")," instruction and write the separate instructions for installing TensorFlow in each Dockerfile. Managing two Dockerfiles in this way improves readability and reduces build time."),(0,r.kt)("p",null,"Combining layers had performance benefits in older versions of Docker. However, since you cannot guarantee the Docker version in which your Docker containers will run, it is recommended to combine layers for readability purposes. It is best to combine layers that can be combined appropriately."),(0,r.kt)("p",null,"Here is an example of a Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Bad Case\nRUN apt-get update\nRUN apt-get install build-essential -y\nRUN apt-get install curl -y\nRUN apt-get install jq -y\nRUN apt-get install git -y\n")),(0,r.kt)("p",null,"This can be written by combining it as follows."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},"# Better Case\nRUN apt-get update && \\\n apt-get install -y \\\n build-essential \\\n curl \\\n jq \\\n git\n")),(0,r.kt)("p",null,"For convenience, it is better to use ",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore"),".",(0,r.kt)("br",{parentName:"p"}),"\n",(0,r.kt)("inlineCode",{parentName:"p"},".dockerignore")," is similar to ",(0,r.kt)("inlineCode",{parentName:"p"},".gitignore")," in the sense that it can be excluded when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"docker build")," just like when doing a ",(0,r.kt)("inlineCode",{parentName:"p"},"git add"),". "),(0,r.kt)("p",null,"More information can be found in the ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/develop/develop-images/dockerfile_best-practices/"},"Docker Official Documentation"),"."),(0,r.kt)("h3",{id:"entrypoint-vs-cmd"},"ENTRYPOINT vs CMD"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are both used when you want to execute a command at the runtime of the container. 
One of them must be present in the Dockerfile."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("strong",{parentName:"li"},"Difference"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD"),": Easily modifiable when running ",(0,r.kt)("inlineCode",{parentName:"li"},"docker run")," command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT"),": Requires the use of ",(0,r.kt)("inlineCode",{parentName:"li"},"--entrypoint")," to modify")))),(0,r.kt)("p",null,"When ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," are used together, ",(0,r.kt)("inlineCode",{parentName:"p"},"CMD")," typically represents the arguments (parameters) for the command specified in ",(0,r.kt)("inlineCode",{parentName:"p"},"ENTRYPOINT"),"."),(0,r.kt)("p",null,"For example, consider the following Dockerfile:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-docker"},'FROM ubuntu:latest\n\n# \uc544\ub798 4 \uac00\uc9c0 option \uc744 \ubc14\uafd4\uac00\uba70 \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uba74 \uc774\ud574\ud558\uae30 \ud3b8\ud569\ub2c8\ub2e4.\n# \ub2e8, NO ENTRYPOINT \uc635\uc158\uc740 base image \uc778 ubuntu:latest \uc5d0 \uc774\ubbf8 \uc788\uc5b4\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218\ub294 \uc5c6\uace0 \ub098\uba38\uc9c0 v2, 3, 5, 6, 8, 9, 11, 12 \ub97c \ud14c\uc2a4\ud2b8\ud574\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n# ENTRYPOINT echo "Hello ENTRYPOINT"\n# ENTRYPOINT ["echo", "Hello ENTRYPOINT"]\n# CMD echo "Hello CMD"\n# CMD ["echo", "Hello CMD"]\n')),(0,r.kt)("p",null,"If you build and run the above ",(0,r.kt)("inlineCode",{parentName:"p"},"Dockerfile")," with the parts marked as comments deactivated, you can get the following results: "),(0,r.kt)("table",null,(0,r.kt)("thead",{parentName:"table"},(0,r.kt)("tr",{parentName:"thead"},(0,r.kt)("th",{parentName:"tr",align:null}),(0,r.kt)("th",{parentName:"tr",align:null},"No ENTRYPOINT"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT a b"),(0,r.kt)("th",{parentName:"tr",align:null},"ENTRYPOINT ",'["a", "b"]'))),(0,r.kt)("tbody",{parentName:"table"},(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"NO CMD")),(0,r.kt)("td",{parentName:"tr",align:null},"Error!"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD ",'["x", "y"]')),(0,r.kt)("td",{parentName:"tr",align:null},"x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b x y")),(0,r.kt)("tr",{parentName:"tbody"},(0,r.kt)("td",{parentName:"tr",align:null},(0,r.kt)("strong",{parentName:"td"},"CMD x y")),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c x y"),(0,r.kt)("td",{parentName:"tr",align:null},"/bin/sh -c a b"),(0,r.kt)("td",{parentName:"tr",align:null},"a b /bin/sh -c x y")))),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"In Kubernetes pod, ",(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"ENTRYPOINT")," corresponds to the command"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"CMD")," corresponds to the arguments")))),(0,r.kt)("h3",{id:"naming-docker-tag"},"Naming docker 
tag"),(0,r.kt)("p",null,'Recommend not using "latest" as a tag for a Docker image, as it is the default tag name and can be easily overwritten unintentionally.'),(0,r.kt)("p",null,"It is important to ensure uniqueness of one image with one tag for the sake of collaboration and debugging in the production stage.",(0,r.kt)("br",{parentName:"p"}),"\n","Using the same tag for different contents can lead to dangling images, which are not shown in the ",(0,r.kt)("inlineCode",{parentName:"p"},"docker images")," but still take up storage space."),(0,r.kt)("h3",{id:"etc"},"ETC"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Logs and other information are stored separately from the container, not inside it.\nThis is because data written from within the container can be lost at any time."),(0,r.kt)("li",{parentName:"ol"},"Secrets and environment-dependent information should not be written directly into the Dockerfile but should be passed in via environment variables or a .env config file."),(0,r.kt)("li",{parentName:"ol"},"There is a ",(0,r.kt)("strong",{parentName:"li"},"linter")," for Dockerfiles, so it is useful to use it when collaborating.\n",(0,r.kt)("a",{parentName:"li",href:"https://github.com/hadolint/hadolint"},"https://github.com/hadolint/hadolint"))),(0,r.kt)("h2",{id:"several-options-for-docker-run"},"Several options for docker run"),(0,r.kt)("p",null,"When using Docker containers, there are some inconveniences.\nSpecifically, Docker does not store any of the work done within the Docker container by default.\nThis is because Docker containers use isolated file systems. Therefore, it is difficult to share data between multiple Docker containers."),(0,r.kt)("p",null,"To solve this problem, there are two approaches offered by Docker."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"storage.png",src:n(9288).Z,width:"501",height:"255"})),(0,r.kt)("h4",{id:"docker-volume"},"Docker volume"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Use the Docker CLI to directly manage a resource called ",(0,r.kt)("inlineCode",{parentName:"li"},"volume"),"."),(0,r.kt)("li",{parentName:"ul"},"Create a specific directory under the Docker area (",(0,r.kt)("inlineCode",{parentName:"li"},"/var/lib/docker"),") on the host and mount that path to a Docker container.")),(0,r.kt)("h4",{id:"bind-mount"},"Bind mount"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Mount a specific path on the host to a Docker container.")),(0,r.kt)("h4",{id:"how-to-use"},"How to use?"),(0,r.kt)("p",null,"The usage is through the same interface, using the ",(0,r.kt)("inlineCode",{parentName:"p"},"-v")," option.",(0,r.kt)("br",{parentName:"p"}),"\n","However, when using volumes, you need to manage them directly by performing commands like ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume create"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume ls"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"docker volume rm"),", etc."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Docker volume"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v my_volume:/app \\\n nginx:latest\n"))),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("p",{parentName:"li"},"Blind mount"),(0,r.kt)("pre",{parentName:"li"},(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run \\\n -v /home/user/some/path:/app \\\n nginx:latest\n")))),(0,r.kt)("p",null,"When developing locally, bind mount can be convenient, but if you want to maintain a 
clean environment, using Docker volume and explicitly performing create and rm operations can be another approach."),(0,r.kt)("p",null,"The way storage is provided in Kubernetes ultimately relies on Docker's bind mount as well."),(0,r.kt)("h3",{id:"docker-run-with-resource-limit"},"Docker run with resource limit"),(0,r.kt)("p",null,"Basically, docker containers can ",(0,r.kt)("strong",{parentName:"p"},"fully utilize the CPU and memory resources of the host OS"),". However, when using this, depending on the resource situation of the host OS, docker containers may abnormally terminate due to issues such as ",(0,r.kt)("strong",{parentName:"p"},"OOM"),".\nTo address this problem, docker provides the ",(0,r.kt)("inlineCode",{parentName:"p"},"-m")," ",(0,r.kt)("a",{parentName:"p",href:"https://docs.docker.com/config/containers/resource_constraints/#limit-a-containers-access-to-memory"},"option")," which allows you to ",(0,r.kt)("strong",{parentName:"p"},"limit the usage of CPU and memory")," when running the docker container."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600\ndocker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600\n")),(0,r.kt)("p",null,"After running the Docker above, you can check the usage through the 'docker stats' command."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"CONTAINER ID NAME CPU % MEM USAGE / LIMIT MEM % NET I/O BLOCK I/O PIDS\n4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1\n4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1\n")),(0,r.kt)("p",null,"In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique."),(0,r.kt)("h3",{id:"docker-run-with-restart-policy"},"docker run with restart policy"),(0,r.kt)("p",null,"If there is a need to keep a particular container running continuously, the ",(0,r.kt)("inlineCode",{parentName:"p"},"--restart=always")," option is provided to try to re-create the container immediately after it is terminated."),(0,r.kt)("p",null,"After entering the option, run the docker."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run --restart=always ubuntu\n")),(0,r.kt)("p",null,"Run ",(0,r.kt)("inlineCode",{parentName:"p"},"watch -n1 docker ps")," to check if it is restarting.\nIf it is running normally, ",(0,r.kt)("inlineCode",{parentName:"p"},"Restarting (0)")," will be printed in STATUS."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},'CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\na911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan\n')),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"},"https://docs.docker.com/engine/reference/commandline/run/#restart-policies---restart"),(0,r.kt)("ul",{parentName:"li"},(0,r.kt)("li",{parentName:"ul"},'Provides options such as "on-failure with max retries" and "always"')))),(0,r.kt)("p",null,"When specifying the restart option for a job resource in Kubernetes, this approach is used."),(0,r.kt)("h3",{id:"running-docker-run-as-a-background-process"},"Running docker run as a background process"),(0,r.kt)("p",null,"By default, when running a Docker container, it is executed as a foreground process. 
This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands."),(0,r.kt)("p",null,"Let's try an example. Open two terminals, and in one terminal, continuously monitor ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),", while in the other terminal, execute the following commands one by one and observe the behavior."),(0,r.kt)("h4",{id:"first-practice"},"First Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"You must remain stopped for 10 seconds and you cannot perform any other commands from that container. After 10 seconds, you can check in docker ps that the container has terminated."),(0,r.kt)("h4",{id:"second-practice"},"Second Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -it ubuntu sleep 10\n")),(0,r.kt)("p",null,"After that, press ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + p")," -> ",(0,r.kt)("inlineCode",{parentName:"p"},"ctrl + q"),"."),(0,r.kt)("p",null,"Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with ",(0,r.kt)("inlineCode",{parentName:"p"},"docker ps"),'. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the ',(0,r.kt)("inlineCode",{parentName:"p"},"run")," command."),(0,r.kt)("h4",{id:"third-practice"},"Third Practice"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-bash"},"docker run -d ubuntu sleep 10\n")),(0,r.kt)("p",null,"In detached mode, you can perform other actions in the terminal that executed the command."),(0,r.kt)("p",null,"It is good to use detached mode appropriately according to the situation.",(0,r.kt)("br",{parentName:"p"}),"\n","For example, when developing a backend API server that communicates with the DB, the backend API server needs to be constantly checked with hot-loading while changing the source code, but the DB does not need to be monitored, so it can be executed as follows.",(0,r.kt)("br",{parentName:"p"}),"\n","Run the DB container in detached mode, and run the backend API server in attached mode to follow the logs."),(0,r.kt)("h2",{id:"references"},"References"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://towardsdatascience.com/docker-storage-598e385f4efe"},"https://towardsdatascience.com/docker-storage-598e385f4efe")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://vsupalov.com/docker-latest-tag/"},"https://vsupalov.com/docker-latest-tag/")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version"},"https://docs.microsoft.com/ko-kr/azure/container-registry/container-registry-image-tag-version")),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("a",{parentName:"li",href:"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"},"https://stevelasker.blog/2018/03/01/docker-tagging-best-practices-for-tagging-and-versioning-docker-images/"))))}u.isMDXComponent=!0},4446:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/layers-d934a487c19f428867e8d460015e8747.png"},9288:(e,t,n)=>{n.d(t,{Z:()=>a});const 
a=n.p+"assets/images/storage-2d2649699364f46922716d1fe9b5470a.png"}}]); \ No newline at end of file diff --git a/en/assets/js/e092da67.960eb7ac.js b/en/assets/js/e092da67.8907921a.js similarity index 97% rename from en/assets/js/e092da67.960eb7ac.js rename to en/assets/js/e092da67.8907921a.js index d91c66d4..627974b0 100644 --- a/en/assets/js/e092da67.960eb7ac.js +++ b/en/assets/js/e092da67.8907921a.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4370],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"kubeflow-dashboard-guide/experiments",title:"5. Experiments(AutoML)",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/en/docs/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes"},next:{title:"6. 
Kubeflow Pipeline Relates",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(2847).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"The Experiments(AutoML) page is where you can manage ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),", which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow."),(0,o.kt)("p",null,"The usage of Katib and Experiments(AutoML) is not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for Everyone")," v1.0, and will be added in v2.0."))}d.isMDXComponent=!0},2847:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4370],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>b});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),u=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(l.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},f=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=u(r),f=o,b=c["".concat(l,".").concat(f)]||c[f]||d[f]||a;return r?n.createElement(b,i(i({ref:t},p),{},{components:r})):n.createElement(b,i({ref:t},p))}));function b(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=f;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,i[1]=s;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>u});var n=r(7462),o=(r(7294),r(3905));const a={title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"kubeflow-dashboard-guide/experiments",id:"kubeflow-dashboard-guide/experiments",title:"5. 
Experiments(AutoML)",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/experiments.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/experiments",permalink:"/en/docs/kubeflow-dashboard-guide/experiments",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/experiments.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Experiments(AutoML)",description:"",sidebar_position:5,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes"},next:{title:"6. Kubeflow Pipeline Relates",permalink:"/en/docs/kubeflow-dashboard-guide/experiments-and-others"}},l={},u=[],p={toc:u},c="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(c,(0,n.Z)({},p,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("p",null,"Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"automl",src:r(2847).Z,width:"1498",height:"272"})),(0,o.kt)("p",null,"The Experiments(AutoML) page is where you can manage ",(0,o.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/katib/overview/"},"Katib"),", which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow."),(0,o.kt)("p",null,"The usage of Katib and Experiments(AutoML) is not covered in ",(0,o.kt)("em",{parentName:"p"},"MLOps for Everyone")," v1.0, and will be added in v2.0."))}d.isMDXComponent=!0},2847:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/automl-7f762c2c67e5319953ec8567769722fb.png"},9268:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/e415f9f6.ef0ab99f.js b/en/assets/js/e415f9f6.76aa63b1.js similarity index 99% rename from en/assets/js/e415f9f6.ef0ab99f.js rename to en/assets/js/e415f9f6.76aa63b1.js index 53c5d8a1..d338de95 100644 --- a/en/assets/js/e415f9f6.ef0ab99f.js +++ b/en/assets/js/e415f9f6.76aa63b1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[451],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},k=o.forwardRef((function(e,t){var 
n=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),k=a,m=c["".concat(s,".").concat(k)]||c[k]||d[k]||r;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,i=new Array(r);i[0]=k;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const r={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"kubeflow-dashboard-guide/notebooks",title:"2. Notebooks",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/en/docs/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"Launch Notebook Server",id:"launch-notebook-server",level:2},{value:"Accessing the Notebook Server",id:"accessing-the-notebook-server",level:2},{value:"Stopping the Notebook Server",id:"stopping-the-notebook-server",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(c,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"launch-notebook-server"},"Launch Notebook Server"),(0,a.kt)("p",null,"Click on the Notebooks tab on the left side of the Central Dashboard."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:n(9268).Z,width:"3940",height:"1278"})),(0,a.kt)("p",null,"You will see a similar screen."),(0,a.kt)("p",null,"The Notebooks tab is a page where users can independently create and access jupyter notebook and code server environments (hereinafter referred to as a notebook server)."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-home",src:n(2463).Z,width:"5008",height:"2682"})),(0,a.kt)("p",null,'Click the "+ NEW NOTEBOOK" button at the top right. '),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"new-notebook",src:n(4983).Z,width:"1900",height:"312"})),(0,a.kt)("p",null,"When the screen shown below appears, now specify the spec (Spec) of the notebook server to be created."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create",src:n(7642).Z,width:"1738",height:"1674"})),(0,a.kt)("details",null,(0,a.kt)("summary",null,"For details for spec:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"name"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies a name to identify the notebook server."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"namespace"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Cannot be changed. 
(It is automatically set to the namespace of the currently logged-in user account.)"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Image"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use an image that utilizes GPU within the notebook server, refer to the ",(0,a.kt)("strong",{parentName:"li"},"GPUs")," section below."))),(0,a.kt)("li",{parentName:"ul"},"If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"CPU / RAM"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of resources required.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"cpu: in core units",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Represents the number of virtual cores, and can also be specified as a float value such as ",(0,a.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,a.kt)("inlineCode",{parentName:"li"},"2.7"),", etc."))),(0,a.kt)("li",{parentName:"ul"},"memory: in Gi units"))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"GPUs"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the number of GPUs to allocate to the Jupyter notebook.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"None"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"When GPU resources are not required."))),(0,a.kt)("li",{parentName:"ul"},"1, 2, 4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Allocates 1, 2, or 4 GPUs."))))),(0,a.kt)("li",{parentName:"ul"},"GPU Vendor:",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you have followed the ",(0,a.kt)("a",{parentName:"li",href:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," guide and installed the NVIDIA GPU plugin, select NVIDIA."))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Workspace Volume"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of disk space required within the notebook server."),(0,a.kt)("li",{parentName:"ul"},"Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Check the ",(0,a.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," checkbox only if it is not necessary to save the notebook server's work. 
",(0,a.kt)("strong",{parentName:"li"},"It is generally recommended not to check this option.")),(0,a.kt)("li",{parentName:"ul"},'If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.'))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Data Volumes"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If additional storage resources are required, click the ",(0,a.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," button to create them."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"These are generally not needed, so detailed explanations are omitted in ",(0,a.kt)("em",{parentName:"li"},"MLOps for All"),"."))))),(0,a.kt)("p",null,"If you followed the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},"Setup GPU (Optional)"),", select NVIDIA if you have installed the nvidia gpu plugin."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"creating",src:n(6525).Z,width:"1928",height:"400"})),(0,a.kt)("p",null,"After creation, the ",(0,a.kt)("strong",{parentName:"p"},"Status")," will change to a green check mark icon, and the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will be activated.\n",(0,a.kt)("img",{alt:"created",src:n(3479).Z,width:"1852",height:"352"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"accessing-the-notebook-server"},"Accessing the Notebook Server"),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will open a new browser window, where you will see the following screen:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-access",src:n(7641).Z,width:"2898",height:"1990"})),(0,a.kt)("p",null,"You can use the Notebook, Console, and Terminal icons in the ",(0,a.kt)("strong",{parentName:"p"},"Launcher")," to start using them."),(0,a.kt)("p",null," Notebook Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-console",src:n(7497).Z,width:"2850",height:"736"})),(0,a.kt)("p",null," Terminal Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"terminal-console",src:n(5869).Z,width:"2834",height:"806"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"stopping-the-notebook-server"},"Stopping the Notebook Server"),(0,a.kt)("p",null,"If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. ",(0,a.kt)("strong",{parentName:"p"},"Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server."),(0,a.kt)("br",{parentName:"p"}),"\n","If you haven't changed the path during notebook server creation, the default Workspace Volume path is ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," inside the notebook server, so any data stored outside the ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," directory will be deleted."),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("inlineCode",{parentName:"p"},"STOP")," button as shown below will stop the notebook server:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-stop",src:n(2094).Z,width:"1832",height:"1014"})),(0,a.kt)("p",null,"Once the server is stopped, the ",(0,a.kt)("inlineCode",{parentName:"p"},"CONNECT")," button will be disabled. 
To restart the notebook server and use it again, click the ",(0,a.kt)("inlineCode",{parentName:"p"},"PLAY")," button."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-restart",src:n(5394).Z,width:"1888",height:"932"})))}d.isMDXComponent=!0},7642:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},3479:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},6525:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},9268:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},4983:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},7641:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},7497:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},2463:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},5394:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},2094:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},5869:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[451],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var s=o.createContext({}),p=function(e){var t=o.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=p(e.components);return o.createElement(s.Provider,{value:t},e.children)},c="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},k=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,r=e.originalType,s=e.parentName,u=l(e,["components","mdxType","originalType","parentName"]),c=p(n),k=a,m=c["".concat(s,".").concat(k)]||c[k]||d[k]||r;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var r=n.length,i=new Array(r);i[0]=k;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[c]="string"==typeof e?e:a,i[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>r,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const r={title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/notebooks",id:"kubeflow-dashboard-guide/notebooks",title:"2. 
Notebooks",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/notebooks.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/notebooks.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Notebooks",description:"",sidebar_position:2,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"1. Central Dashboard",permalink:"/en/docs/kubeflow-dashboard-guide/intro"},next:{title:"3. Tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards"}},s={},p=[{value:"Launch Notebook Server",id:"launch-notebook-server",level:2},{value:"Accessing the Notebook Server",id:"accessing-the-notebook-server",level:2},{value:"Stopping the Notebook Server",id:"stopping-the-notebook-server",level:2}],u={toc:p},c="wrapper";function d(e){let{components:t,...r}=e;return(0,a.kt)(c,(0,o.Z)({},u,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"launch-notebook-server"},"Launch Notebook Server"),(0,a.kt)("p",null,"Click on the Notebooks tab on the left side of the Central Dashboard."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:n(9268).Z,width:"3940",height:"1278"})),(0,a.kt)("p",null,"You will see a similar screen."),(0,a.kt)("p",null,"The Notebooks tab is a page where users can independently create and access jupyter notebook and code server environments (hereinafter referred to as a notebook server)."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-home",src:n(2463).Z,width:"5008",height:"2682"})),(0,a.kt)("p",null,'Click the "+ NEW NOTEBOOK" button at the top right. '),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"new-notebook",src:n(4983).Z,width:"1900",height:"312"})),(0,a.kt)("p",null,"When the screen shown below appears, now specify the spec (Spec) of the notebook server to be created."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"create",src:n(7642).Z,width:"1738",height:"1674"})),(0,a.kt)("details",null,(0,a.kt)("summary",null,"For details for spec:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"name"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies a name to identify the notebook server."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"namespace"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Cannot be changed. 
(It is automatically set to the namespace of the currently logged-in user account.)"))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Image"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you want to use an image that utilizes GPU within the notebook server, refer to the ",(0,a.kt)("strong",{parentName:"li"},"GPUs")," section below."))),(0,a.kt)("li",{parentName:"ul"},"If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"CPU / RAM"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of resources required.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"cpu: in core units",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Represents the number of virtual cores, and can also be specified as a float value such as ",(0,a.kt)("inlineCode",{parentName:"li"},"1.5"),", ",(0,a.kt)("inlineCode",{parentName:"li"},"2.7"),", etc."))),(0,a.kt)("li",{parentName:"ul"},"memory: in Gi units"))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"GPUs"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the number of GPUs to allocate to the Jupyter notebook.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("inlineCode",{parentName:"li"},"None"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"When GPU resources are not required."))),(0,a.kt)("li",{parentName:"ul"},"1, 2, 4",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Allocates 1, 2, or 4 GPUs."))))),(0,a.kt)("li",{parentName:"ul"},"GPU Vendor:",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If you have followed the ",(0,a.kt)("a",{parentName:"li",href:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},"(Optional) Setup GPU")," guide and installed the NVIDIA GPU plugin, select NVIDIA."))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Workspace Volume"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Specifies the amount of disk space required within the notebook server."),(0,a.kt)("li",{parentName:"ul"},"Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"Check the ",(0,a.kt)("strong",{parentName:"li"},"\"Don't use Persistent Storage for User's home\"")," checkbox only if it is not necessary to save the notebook server's work. 
",(0,a.kt)("strong",{parentName:"li"},"It is generally recommended not to check this option.")),(0,a.kt)("li",{parentName:"ul"},'If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.'))))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("strong",{parentName:"li"},"Data Volumes"),":",(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"If additional storage resources are required, click the ",(0,a.kt)("strong",{parentName:"li"},'"+ ADD VOLUME"')," button to create them."))),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("del",{parentName:"li"},"Configurations, Affinity/Tolerations, Miscellaneous Settings"),(0,a.kt)("ul",{parentName:"li"},(0,a.kt)("li",{parentName:"ul"},"These are generally not needed, so detailed explanations are omitted in ",(0,a.kt)("em",{parentName:"li"},"MLOps for All"),"."))))),(0,a.kt)("p",null,"If you followed the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/setup-kubernetes/setup-nvidia-gpu"},"Setup GPU (Optional)"),", select NVIDIA if you have installed the nvidia gpu plugin."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"creating",src:n(6525).Z,width:"1928",height:"400"})),(0,a.kt)("p",null,"After creation, the ",(0,a.kt)("strong",{parentName:"p"},"Status")," will change to a green check mark icon, and the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will be activated.\n",(0,a.kt)("img",{alt:"created",src:n(3479).Z,width:"1852",height:"352"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"accessing-the-notebook-server"},"Accessing the Notebook Server"),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("strong",{parentName:"p"},"CONNECT button")," will open a new browser window, where you will see the following screen:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-access",src:n(7641).Z,width:"2898",height:"1990"})),(0,a.kt)("p",null,"You can use the Notebook, Console, and Terminal icons in the ",(0,a.kt)("strong",{parentName:"p"},"Launcher")," to start using them."),(0,a.kt)("p",null," Notebook Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-console",src:n(7497).Z,width:"2850",height:"736"})),(0,a.kt)("p",null," Terminal Interface"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"terminal-console",src:n(5869).Z,width:"2834",height:"806"})),(0,a.kt)("hr",null),(0,a.kt)("h2",{id:"stopping-the-notebook-server"},"Stopping the Notebook Server"),(0,a.kt)("p",null,"If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. ",(0,a.kt)("strong",{parentName:"p"},"Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server."),(0,a.kt)("br",{parentName:"p"}),"\n","If you haven't changed the path during notebook server creation, the default Workspace Volume path is ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," inside the notebook server, so any data stored outside the ",(0,a.kt)("inlineCode",{parentName:"p"},"/home/jovyan")," directory will be deleted."),(0,a.kt)("p",null,"Clicking the ",(0,a.kt)("inlineCode",{parentName:"p"},"STOP")," button as shown below will stop the notebook server:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-stop",src:n(2094).Z,width:"1832",height:"1014"})),(0,a.kt)("p",null,"Once the server is stopped, the ",(0,a.kt)("inlineCode",{parentName:"p"},"CONNECT")," button will be disabled. 
To restart the notebook server and use it again, click the ",(0,a.kt)("inlineCode",{parentName:"p"},"PLAY")," button."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"notebook-restart",src:n(5394).Z,width:"1888",height:"932"})))}d.isMDXComponent=!0},7642:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/create-b349ef65d07ce46d18eb743995e83328.png"},3479:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/created-ea0c6e5b069a3bf68ec30dd2d9c8fda9.png"},6525:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/creating-fea15b81993043e41562213ce27be9c8.png"},9268:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},4983:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/new-notebook-f462329837ba1224dad0fdd5065aa161.png"},7641:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-access-04af482a0de3bf472671bb8106d2124d.png"},7497:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-console-57b91be5611c7bc685da1b29c792a45c.png"},2463:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-home-bc23928c112e027b46359aad251a8b69.png"},5394:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-restart-6550d536547af1c9e19f8ab05946ee9d.png"},2094:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/notebook-stop-bcc860736062b5cfb5831bab545dc60c.png"},5869:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/terminal-console-7fb950f9bf731144081feb0afb245bed.png"}}]); \ No newline at end of file diff --git a/en/assets/js/e68086c7.51b60e7c.js b/en/assets/js/e68086c7.3054fb69.js similarity index 98% rename from en/assets/js/e68086c7.51b60e7c.js rename to en/assets/js/e68086c7.3054fb69.js index e2992322..4b490642 100644 --- a/en/assets/js/e68086c7.51b60e7c.js +++ b/en/assets/js/e68086c7.3054fb69.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1723],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=o.createContext({}),l=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},h=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,a=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=l(n),h=r,m=d["".concat(c,".").concat(h)]||d[h]||p[h]||a;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=n.length,i=new Array(a);i[0]=h;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:r,i[1]=s;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>l});var o=n(7462),r=(n(7294),n(3905));const a={title:"4. 
Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"introduction/why_kubernetes",id:"introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/en/docs/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/why_kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/en/docs/introduction/component"},next:{title:"1. Introduction",permalink:"/en/docs/setup-kubernetes/intro"}},c={},l=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:l},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,o.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,r.kt)("p",null,"When talking about MLOps, why is the word Kubernetes always heard together?"),(0,r.kt)("p",null,"To build a successful MLOps system, various components are needed as described in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"Components of MLOps"),", but to operate them organically at the infrastructure level, there are many issues to be solved. For example, simply running a large number of machine learning model requests in order, ensuring the same execution environment in other workspaces, and responding quickly when a deployed service has a failure."),(0,r.kt)("p",null,"The need for containers and container orchestration systems appears here. With the introduction of container orchestration systems such as Kubernetes, efficient isolation and management of execution environments can be achieved. By introducing a container orchestration system, it is possible to prevent situations such as ",(0,r.kt)("em",{parentName:"p"},"'Is anyone using cluster 1?', 'Who killed my process that was using GPU?', 'Who updated the x package on the cluster?")," when developing and deploying machine learning models while a few developers share a small number of clusters."),(0,r.kt)("h2",{id:"container"},"Container"),(0,r.kt)("p",null,"Microsoft defines a container as follows: What is a container then? In Microsoft, a container is defined as ",(0,r.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/en-us/overview/what-is-a-container/"},"follows"),"."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"Container: Standardized, portable packaging of an application's code, libraries, and configuration files")),(0,r.kt)("p",null,"But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python execution environment, package version, etc. 
To prevent this, the technology used to share and execute the entire dependent execution environment with the source code used in machine learning is called containerization technology. This packaged form is called a container image, and by sharing the container image, users can ensure the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the source code and requirements.txt file of the model, but the entire container image with the execution environment, you can avoid situations such as ",(0,r.kt)("em",{parentName:"p"},'"It works on my notebook, why not yours?"'),"."),(0,r.kt)("p",null,'One translation of the Korean sentence to English is: "One of the common misunderstandings that people who are new to containers often make is to assume that "container == Docker". Docker is not a concept that has the same meaning as containers; rather, it is a tool that provides features to make it easier and more flexible to use containers, such as launching containers and creating and sharing container images. In summary, container is a virtualization technology, and Docker is an implementation of virtualization technology.'),(0,r.kt)("p",null,"However, Docker has become the mainstream quickly due to its easy usability and high efficiency among various container virtualization tools, so when people think of containers, they often think of Docker automatically. There are various reasons why the container and Docker ecosystem have become the mainstream, but for technical reasons, I won't go into that detail since it is outside the scope of Everybody's MLOps."),(0,r.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,r.kt)("p",null,'Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.'),(0,r.kt)("p",null,"In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues."),(0,r.kt)("p",null,"For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations."),(0,r.kt)("p",null,"A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier."),(0,r.kt)("p",null,"How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. 
If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal."),(0,r.kt)("p",null,"As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems."),(0,r.kt)("p",null,"According to the ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"survey")," released by CNCF in 2018, Kubernetes was already showing its prominence. The ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"survey")," published in 2019 indicates that 78% of respondents were using Kubernetes at a production level."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"k8s-graph",src:n(9674).Z,width:"2048",height:"1317"})),(0,r.kt)("p",null,'The growth of the Kubernetes ecosystem can be attributed to various reasons. However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.'))}p.isMDXComponent=!0},9674:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1723],{3905:(e,t,n)=>{n.d(t,{Zo:()=>u,kt:()=>m});var o=n(7294);function r(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function a(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function i(e){for(var t=1;t=0||(r[n]=e[n]);return r}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(r[n]=e[n])}return r}var c=o.createContext({}),l=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):i(i({},t),e)),n},u=function(e){var t=l(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},h=o.forwardRef((function(e,t){var n=e.components,r=e.mdxType,a=e.originalType,c=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=l(n),h=r,m=d["".concat(c,".").concat(h)]||d[h]||p[h]||a;return n?o.createElement(m,i(i({ref:t},u),{},{components:n})):o.createElement(m,i({ref:t},u))}));function m(e,t){var n=arguments,r=t&&t.mdxType;if("string"==typeof e||r){var a=n.length,i=new Array(a);i[0]=h;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s[d]="string"==typeof e?e:r,i[1]=s;for(var l=2;l{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>s,toc:()=>l});var o=n(7462),r=(n(7294),n(3905));const a={title:"4. 
Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:new Date("2021-12-03T00:00:00.000Z"),lastmod:new Date("2021-12-10T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},i=void 0,s={unversionedId:"introduction/why_kubernetes",id:"introduction/why_kubernetes",title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/why_kubernetes.md",sourceDirName:"introduction",slug:"/introduction/why_kubernetes",permalink:"/en/docs/introduction/why_kubernetes",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/why_kubernetes.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:4,frontMatter:{title:"4. Why Kubernetes?",description:"Reason for using k8s in MLOps",sidebar_position:4,date:"2021-12-03T00:00:00.000Z",lastmod:"2021-12-10T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"3. Components of MLOps",permalink:"/en/docs/introduction/component"},next:{title:"1. Introduction",permalink:"/en/docs/setup-kubernetes/intro"}},c={},l=[{value:"MLOps & Kubernetes",id:"mlops--kubernetes",level:2},{value:"Container",id:"container",level:2},{value:"Container Orchestration System",id:"container-orchestration-system",level:2}],u={toc:l},d="wrapper";function p(e){let{components:t,...a}=e;return(0,r.kt)(d,(0,o.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,r.kt)("h2",{id:"mlops--kubernetes"},"MLOps & Kubernetes"),(0,r.kt)("p",null,"When talking about MLOps, why is the word Kubernetes always heard together?"),(0,r.kt)("p",null,"To build a successful MLOps system, various components are needed as described in ",(0,r.kt)("a",{parentName:"p",href:"/en/docs/introduction/component"},"Components of MLOps"),", but to operate them organically at the infrastructure level, there are many issues to be solved. For example, simply running a large number of machine learning model requests in order, ensuring the same execution environment in other workspaces, and responding quickly when a deployed service has a failure."),(0,r.kt)("p",null,"The need for containers and container orchestration systems appears here. With the introduction of container orchestration systems such as Kubernetes, efficient isolation and management of execution environments can be achieved. By introducing a container orchestration system, it is possible to prevent situations such as ",(0,r.kt)("em",{parentName:"p"},"'Is anyone using cluster 1?', 'Who killed my process that was using GPU?', 'Who updated the x package on the cluster?")," when developing and deploying machine learning models while a few developers share a small number of clusters."),(0,r.kt)("h2",{id:"container"},"Container"),(0,r.kt)("p",null,"Microsoft defines a container as follows: What is a container then? In Microsoft, a container is defined as ",(0,r.kt)("a",{parentName:"p",href:"https://azure.microsoft.com/en-us/overview/what-is-a-container/"},"follows"),"."),(0,r.kt)("blockquote",null,(0,r.kt)("p",{parentName:"blockquote"},"Container: Standardized, portable packaging of an application's code, libraries, and configuration files")),(0,r.kt)("p",null,"But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python execution environment, package version, etc. 
To prevent this, the technology used to share and execute the entire dependent execution environment with the source code used in machine learning is called containerization technology. This packaged form is called a container image, and by sharing the container image, users can ensure the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the source code and requirements.txt file of the model, but the entire container image with the execution environment, you can avoid situations such as ",(0,r.kt)("em",{parentName:"p"},'"It works on my notebook, why not yours?"'),"."),(0,r.kt)("p",null,'One translation of the Korean sentence to English is: "One of the common misunderstandings that people who are new to containers often make is to assume that "container == Docker". Docker is not a concept that has the same meaning as containers; rather, it is a tool that provides features to make it easier and more flexible to use containers, such as launching containers and creating and sharing container images. In summary, container is a virtualization technology, and Docker is an implementation of virtualization technology.'),(0,r.kt)("p",null,"However, Docker has become the mainstream quickly due to its easy usability and high efficiency among various container virtualization tools, so when people think of containers, they often think of Docker automatically. There are various reasons why the container and Docker ecosystem have become the mainstream, but for technical reasons, I won't go into that detail since it is outside the scope of Everybody's MLOps."),(0,r.kt)("h2",{id:"container-orchestration-system"},"Container Orchestration System"),(0,r.kt)("p",null,'Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.'),(0,r.kt)("p",null,"In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues."),(0,r.kt)("p",null,"For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations."),(0,r.kt)("p",null,"A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier."),(0,r.kt)("p",null,"How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. 
If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal."),(0,r.kt)("p",null,"As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems."),(0,r.kt)("p",null,"According to the ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/blog/2018/08/29/cncf-survey-use-of-cloud-native-technologies-in-production-has-grown-over-200-percent/"},"survey")," released by CNCF in 2018, Kubernetes was already showing its prominence. The ",(0,r.kt)("a",{parentName:"p",href:"https://www.cncf.io/wp-content/uploads/2020/08/CNCF_Survey_Report.pdf"},"survey")," published in 2019 indicates that 78% of respondents were using Kubernetes at a production level."),(0,r.kt)("p",null,(0,r.kt)("img",{alt:"k8s-graph",src:n(9674).Z,width:"2048",height:"1317"})),(0,r.kt)("p",null,'The growth of the Kubernetes ecosystem can be attributed to various reasons. However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.'))}p.isMDXComponent=!0},9674:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/k8s-graph-4320bbc5bf9fc0dccdeb1edc0157e8ec.png"}}]); \ No newline at end of file diff --git a/en/assets/js/e68a1c9e.1459ba26.js b/en/assets/js/e68a1c9e.a0fc699f.js similarity index 98% rename from en/assets/js/e68a1c9e.1459ba26.js rename to en/assets/js/e68a1c9e.a0fc699f.js index 16828acf..cc753130 100644 --- a/en/assets/js/e68a1c9e.1459ba26.js +++ b/en/assets/js/e68a1c9e.a0fc699f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[463],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),m=d(t),u=o,b=m["".concat(i,".").concat(u)]||m[u]||c[u]||s;return t?a.createElement(b,r(r({ref:n},p),{},{components:t})):a.createElement(b,r({ref:n},p))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[m]="string"==typeof e?e:o,r[1]=l;for(var 
d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>c,frontMatter:()=>s,metadata:()=>l,toc:()=>d});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"version-1.0/setup-components/install-components-seldon",title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/en/docs/1.0/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-seldon.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/en/docs/1.0/setup-components/install-components-mlflow"},next:{title:"4. Prometheus & Grafana",permalink:"/en/docs/1.0/setup-components/install-components-pg"}},i={},d=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Installing Seldon-Core",id:"installing-seldon-core",level:2},{value:"Adding Ambassador to the Helm Repository",id:"adding-ambassador-to-the-helm-repository",level:3},{value:"Update Ambassador - Helm Repository",id:"update-ambassador---helm-repository",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],p={toc:d},m="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(m,(0,a.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core is one of the open source frameworks that can deploy and manage numerous machine learning models in Kubernetes environments.",(0,o.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the official ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"product description page")," and ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"GitHub")," of Seldon-Core and API Deployment part."),(0,o.kt)("h2",{id:"installing-seldon-core"},"Installing Seldon-Core"),(0,o.kt)("p",null,"In order to use Seldon-Core, modules such as Ambassador, which is responsible for Ingress of Kubernetes, and Istio are required ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"here"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core officially supports only Ambassador and Istio, and ",(0,o.kt)("em",{parentName:"p"},"MLOps for everyone")," will use Ambassador to use Seldon-core, so we will install Ambassador."),(0,o.kt)("h3",{id:"adding-ambassador-to-the-helm-repository"},"Adding Ambassador to the Helm 
Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"If the following message is displayed, it means it has been added normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-ambassador---helm-repository"},"Update Ambassador - Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is output, it means that the update has been completed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. \u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"Install version 6.9.3 of the Ambassador Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! 
You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"Wait until four pods become running in the seldon-system."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"Install version 1.11.2 of the seldon-core-operator Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"Wait until one seldon-controller-manager pod is Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}c.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[463],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>b});var a=t(7294);function o(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function r(e){for(var 
n=1;n=0||(o[t]=e[t]);return o}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(o[t]=e[t])}return o}var i=a.createContext({}),d=function(e){var n=a.useContext(i),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=d(e.components);return a.createElement(i.Provider,{value:n},e.children)},m="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},u=a.forwardRef((function(e,n){var t=e.components,o=e.mdxType,s=e.originalType,i=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),m=d(t),u=o,b=m["".concat(i,".").concat(u)]||m[u]||c[u]||s;return t?a.createElement(b,r(r({ref:n},p),{},{components:t})):a.createElement(b,r({ref:n},p))}));function b(e,n){var t=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var s=t.length,r=new Array(s);r[0]=u;var l={};for(var i in n)hasOwnProperty.call(n,i)&&(l[i]=n[i]);l.originalType=e,l[m]="string"==typeof e?e:o,r[1]=l;for(var d=2;d{t.r(n),t.d(n,{assets:()=>i,contentTitle:()=>r,default:()=>c,frontMatter:()=>s,metadata:()=>l,toc:()=>d});var a=t(7462),o=(t(7294),t(3905));const s={title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:new Date("2021-12-13T00:00:00.000Z"),lastmod:new Date("2021-12-13T00:00:00.000Z"),contributors:["Jaeyeon Kim"]},r=void 0,l={unversionedId:"setup-components/install-components-seldon",id:"version-1.0/setup-components/install-components-seldon",title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/setup-components/install-components-seldon.md",sourceDirName:"setup-components",slug:"/setup-components/install-components-seldon",permalink:"/en/docs/1.0/setup-components/install-components-seldon",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/setup-components/install-components-seldon.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Seldon-Core",description:"\uad6c\uc131\uc694\uc18c \uc124\uce58 - Seldon-Core",sidebar_position:3,date:"2021-12-13T00:00:00.000Z",lastmod:"2021-12-13T00:00:00.000Z",contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. MLflow Tracking Server",permalink:"/en/docs/1.0/setup-components/install-components-mlflow"},next:{title:"4. 
Prometheus & Grafana",permalink:"/en/docs/1.0/setup-components/install-components-pg"}},i={},d=[{value:"Seldon-Core",id:"seldon-core",level:2},{value:"Installing Seldon-Core",id:"installing-seldon-core",level:2},{value:"Adding Ambassador to the Helm Repository",id:"adding-ambassador-to-the-helm-repository",level:3},{value:"Update Ambassador - Helm Repository",id:"update-ambassador---helm-repository",level:3},{value:"Ambassador - Helm Install",id:"ambassador---helm-install",level:3},{value:"Seldon-Core - Helm Install",id:"seldon-core---helm-install",level:3},{value:"References",id:"references",level:2}],p={toc:d},m="wrapper";function c(e){let{components:n,...t}=e;return(0,o.kt)(m,(0,a.Z)({},p,t,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"seldon-core"},"Seldon-Core"),(0,o.kt)("p",null,"Seldon-Core is one of the open source frameworks that can deploy and manage numerous machine learning models in Kubernetes environments.",(0,o.kt)("br",{parentName:"p"}),"\n","For more details, please refer to the official ",(0,o.kt)("a",{parentName:"p",href:"https://www.seldon.io/tech/products/core/"},"product description page")," and ",(0,o.kt)("a",{parentName:"p",href:"https://github.com/SeldonIO/seldon-core"},"GitHub")," of Seldon-Core and API Deployment part."),(0,o.kt)("h2",{id:"installing-seldon-core"},"Installing Seldon-Core"),(0,o.kt)("p",null,"In order to use Seldon-Core, modules such as Ambassador, which is responsible for Ingress of Kubernetes, and Istio are required ",(0,o.kt)("a",{parentName:"p",href:"https://docs.seldon.io/projects/seldon-core/en/latest/workflow/install.html"},"here"),".",(0,o.kt)("br",{parentName:"p"}),"\n","Seldon-Core officially supports only Ambassador and Istio, and ",(0,o.kt)("em",{parentName:"p"},"MLOps for everyone")," will use Ambassador to use Seldon-core, so we will install Ambassador."),(0,o.kt)("h3",{id:"adding-ambassador-to-the-helm-repository"},"Adding Ambassador to the Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo add datawire https://www.getambassador.io\n")),(0,o.kt)("p",null,"If the following message is displayed, it means it has been added normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'"datawire" has been added to your repositories\n')),(0,o.kt)("h3",{id:"update-ambassador---helm-repository"},"Update Ambassador - Helm Repository"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm repo update\n")),(0,o.kt)("p",null,"If the following message is output, it means that the update has been completed normally."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},'Hang tight while we grab the latest from your chart repositories...\n...Successfully got an update from the "datawire" chart repository\nUpdate Complete. 
\u2388Happy Helming!\u2388\n')),(0,o.kt)("h3",{id:"ambassador---helm-install"},"Ambassador - Helm Install"),(0,o.kt)("p",null,"Install version 6.9.3 of the Ambassador Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install ambassador datawire/ambassador \\\n --namespace seldon-system \\\n --create-namespace \\\n --set image.repository=quay.io/datawire/ambassador \\\n --set enableAES=false \\\n --set crds.keep=false \\\n --version 6.9.3\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"\uc0dd\ub7b5...\n\nW1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role\nW1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding\nNAME: ambassador\nLAST DEPLOYED: Mon Dec 6 17:01:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nNOTES:\n-------------------------------------------------------------------------------\n Congratulations! You've successfully installed Ambassador!\n\n-------------------------------------------------------------------------------\nTo get the IP address of Ambassador, run the following commands:\nNOTE: It may take a few minutes for the LoadBalancer IP to be available.\n You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'\n\n On GKE/Azure:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')\n\n On AWS:\n export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')\n\n echo http://$SERVICE_IP:\n\nFor help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.\n")),(0,o.kt)("p",null,"Wait until four pods become running in the seldon-system."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"ambassador-7f596c8b57-4s9xh 1/1 Running 0 7m15s\nambassador-7f596c8b57-dt6lr 1/1 Running 0 7m15s\nambassador-7f596c8b57-h5l6f 1/1 Running 0 7m15s\nambassador-agent-77bccdfcd5-d5jxj 1/1 Running 0 7m15s\n")),(0,o.kt)("h3",{id:"seldon-core---helm-install"},"Seldon-Core - Helm Install"),(0,o.kt)("p",null,"Install version 1.11.2 of the seldon-core-operator Chart."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"helm install seldon-core seldon-core-operator \\\n --repo https://storage.googleapis.com/seldon-charts \\\n --namespace seldon-system \\\n --set usageMetrics.enabled=true \\\n --set ambassador.enabled=true \\\n --version 1.11.2\n")),(0,o.kt)("p",null,"The following message should be displayed."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"Skip...\n\nW1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration\nNAME: seldon-core\nLAST DEPLOYED: Mon Dec 6 17:05:34 2021\nNAMESPACE: seldon-system\nSTATUS: deployed\nREVISION: 1\nTEST SUITE: None\n")),(0,o.kt)("p",null,"Wait until 
one seldon-controller-manager pod is Running in the seldon-system namespace."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get pod -n seldon-system | grep seldon-controller\n")),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-controller-manager-8457b8b5c7-r2frm 1/1 Running 0 2m22s\n")),(0,o.kt)("h2",{id:"references"},"References"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},(0,o.kt)("a",{parentName:"li",href:"https://docs.seldon.io/projects/seldon-core/en/latest/examples/server_examples.html#examples-server-examples--page-root"},"Example Model Servers with Seldon"))))}c.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/en/assets/js/ea288814.9cec2943.js b/en/assets/js/ea288814.ba96924f.js similarity index 97% rename from en/assets/js/ea288814.9cec2943.js rename to en/assets/js/ea288814.ba96924f.js index d47aa79f..4f0af589 100644 --- a/en/assets/js/ea288814.9cec2943.js +++ b/en/assets/js/ea288814.ba96924f.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4510],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=u(r),b=a,m=d["".concat(s,".").concat(b)]||d[b]||c[b]||o;return r?n.createElement(m,i(i({ref:t},p),{},{components:r})):n.createElement(m,i({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=r.length,i=new Array(o);i[0]=b;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[d]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"version-1.0/kubeflow-dashboard-guide/intro",title:"1. Central Dashboard",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. 
Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/en/docs/1.0/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},d="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(d,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Once you have completed ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-kf"},"Kubeflow installation"),", you can access the dashboard through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(7192).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"The Central Dashboard is a UI that integrates all the features provided by Kubeflow. The features provided by the Central Dashboard can be divided based on the tabs on the left side"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"Let's now look at the simple usage of each feature."))}c.isMDXComponent=!0},7192:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},7173:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[4510],{3905:(e,t,r)=>{r.d(t,{Zo:()=>p,kt:()=>m});var n=r(7294);function a(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function o(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(a[r]=e[r]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=n.createContext({}),u=function(e){var t=n.useContext(s),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},p=function(e){var t=u(e.components);return n.createElement(s.Provider,{value:t},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},b=n.forwardRef((function(e,t){var r=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=u(r),b=a,m=d["".concat(s,".").concat(b)]||d[b]||c[b]||o;return r?n.createElement(m,i(i({ref:t},p),{},{components:r})):n.createElement(m,i({ref:t},p))}));function m(e,t){var r=arguments,a=t&&t.mdxType;if("string"==typeof 
e||a){var o=r.length,i=new Array(o);i[0]=b;var l={};for(var s in t)hasOwnProperty.call(t,s)&&(l[s]=t[s]);l.originalType=e,l[d]="string"==typeof e?e:a,i[1]=l;for(var u=2;u{r.r(t),r.d(t,{assets:()=>s,contentTitle:()=>i,default:()=>c,frontMatter:()=>o,metadata:()=>l,toc:()=>u});var n=r(7462),a=(r(7294),r(3905));const o={title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},i=void 0,l={unversionedId:"kubeflow-dashboard-guide/intro",id:"version-1.0/kubeflow-dashboard-guide/intro",title:"1. Central Dashboard",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/kubeflow-dashboard-guide/intro.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/intro",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/kubeflow-dashboard-guide/intro.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. Central Dashboard",description:"",sidebar_position:1,contributors:["Jaeyeon Kim","SeungTae Kim"]},sidebar:"tutorialSidebar",previous:{title:"4. Prometheus & Grafana",permalink:"/en/docs/1.0/setup-components/install-components-pg"},next:{title:"2. Notebooks",permalink:"/en/docs/1.0/kubeflow-dashboard-guide/notebooks"}},s={},u=[],p={toc:u},d="wrapper";function c(e){let{components:t,...o}=e;return(0,a.kt)(d,(0,n.Z)({},p,o,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"Once you have completed ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/1.0/setup-components/install-components-kf"},"Kubeflow installation"),", you can access the dashboard through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"after-login",src:r(7192).Z,width:"4008",height:"1266"})),(0,a.kt)("p",null,"The Central Dashboard is a UI that integrates all the features provided by Kubeflow. 
The features provided by the Central Dashboard can be divided based on the tabs on the left side"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"left-tabs",src:r(7173).Z,width:"3940",height:"1278"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Home"),(0,a.kt)("li",{parentName:"ul"},"Notebooks"),(0,a.kt)("li",{parentName:"ul"},"Tensorboards"),(0,a.kt)("li",{parentName:"ul"},"Volumes"),(0,a.kt)("li",{parentName:"ul"},"Models"),(0,a.kt)("li",{parentName:"ul"},"Experiments(AutoML)"),(0,a.kt)("li",{parentName:"ul"},"Experiments(KFP)"),(0,a.kt)("li",{parentName:"ul"},"Pipelines"),(0,a.kt)("li",{parentName:"ul"},"Runs"),(0,a.kt)("li",{parentName:"ul"},"Recurring Runs"),(0,a.kt)("li",{parentName:"ul"},"Artifacts"),(0,a.kt)("li",{parentName:"ul"},"Executions")),(0,a.kt)("p",null,"Let's now look at the simple usage of each feature."))}c.isMDXComponent=!0},7192:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/after-login-4b41daca6d9a97824552770b832d59b0.png"},7173:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"}}]); \ No newline at end of file diff --git a/en/assets/js/ef82f9f8.46924cf3.js b/en/assets/js/ef82f9f8.bd899c1b.js similarity index 97% rename from en/assets/js/ef82f9f8.46924cf3.js rename to en/assets/js/ef82f9f8.bd899c1b.js index 9efb595e..4da2489c 100644 --- a/en/assets/js/ef82f9f8.46924cf3.js +++ b/en/assets/js/ef82f9f8.bd899c1b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1033],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),u=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},d=function(e){var t=u(e.components);return o.createElement(l.Provider,{value:t},e.children)},c="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=u(r),b=n,f=c["".concat(l,".").concat(b)]||c[b]||p[b]||a;return r?o.createElement(f,s(s({ref:t},d),{},{components:r})):o.createElement(f,s({ref:t},d))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[c]="string"==typeof e?e:n,s[1]=i;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"kubeflow-dashboard-guide/tensorboards",title:"3. 
Tensorboards",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes"}},l={},u=[],d={toc:u},c="wrapper";function p(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,o.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"We can see the following screen. "),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(5218).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can be used for the purpose of storing data directly from a Kubeflow Pipeline run for visualization purposes."),(0,n.kt)("p",null,"You can refer to the ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"TensorBoard documentation")," for more information on using TensorBoard with Kubeflow Pipeline runs."),(0,n.kt)("p",null,"There are various ways to visualize the results of Kubeflow Pipeline runs, and in ",(0,n.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. 
Therefore, detailed explanations of the TensorBoards page will be omitted in this context."))}p.isMDXComponent=!0},9268:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},5218:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1033],{3905:(e,t,r)=>{r.d(t,{Zo:()=>d,kt:()=>f});var o=r(7294);function n(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,o)}return r}function s(e){for(var t=1;t=0||(n[r]=e[r]);return n}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(n[r]=e[r])}return n}var l=o.createContext({}),u=function(e){var t=o.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},d=function(e){var t=u(e.components);return o.createElement(l.Provider,{value:t},e.children)},c="mdxType",p={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},b=o.forwardRef((function(e,t){var r=e.components,n=e.mdxType,a=e.originalType,l=e.parentName,d=i(e,["components","mdxType","originalType","parentName"]),c=u(r),b=n,f=c["".concat(l,".").concat(b)]||c[b]||p[b]||a;return r?o.createElement(f,s(s({ref:t},d),{},{components:r})):o.createElement(f,s({ref:t},d))}));function f(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var a=r.length,s=new Array(a);s[0]=b;var i={};for(var l in t)hasOwnProperty.call(t,l)&&(i[l]=t[l]);i.originalType=e,i[c]="string"==typeof e?e:n,s[1]=i;for(var u=2;u{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>s,default:()=>p,frontMatter:()=>a,metadata:()=>i,toc:()=>u});var o=r(7462),n=(r(7294),r(3905));const a={title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},s=void 0,i={unversionedId:"kubeflow-dashboard-guide/tensorboards",id:"kubeflow-dashboard-guide/tensorboards",title:"3. Tensorboards",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow-dashboard-guide/tensorboards.md",sourceDirName:"kubeflow-dashboard-guide",slug:"/kubeflow-dashboard-guide/tensorboards",permalink:"/en/docs/kubeflow-dashboard-guide/tensorboards",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow-dashboard-guide/tensorboards.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"3. Tensorboards",description:"",sidebar_position:3,contributors:["Jaeyeon Kim"]},sidebar:"tutorialSidebar",previous:{title:"2. Notebooks",permalink:"/en/docs/kubeflow-dashboard-guide/notebooks"},next:{title:"4. Volumes",permalink:"/en/docs/kubeflow-dashboard-guide/volumes"}},l={},u=[],d={toc:u},c="wrapper";function p(e){let{components:t,...a}=e;return(0,n.kt)(c,(0,o.Z)({},d,a,{components:t,mdxType:"MDXLayout"}),(0,n.kt)("p",null,"Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next."),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"left-tabs",src:r(9268).Z,width:"3940",height:"1278"})),(0,n.kt)("p",null,"We can see the following screen. 
"),(0,n.kt)("p",null,(0,n.kt)("img",{alt:"tensorboard",src:r(5218).Z,width:"2030",height:"406"})),(0,n.kt)("p",null,"The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can be used for the purpose of storing data directly from a Kubeflow Pipeline run for visualization purposes."),(0,n.kt)("p",null,"You can refer to the ",(0,n.kt)("a",{parentName:"p",href:"https://www.kubeflow.org/docs/components/pipelines/sdk/output-viewer/#tensorboard"},"TensorBoard documentation")," for more information on using TensorBoard with Kubeflow Pipeline runs."),(0,n.kt)("p",null,"There are various ways to visualize the results of Kubeflow Pipeline runs, and in ",(0,n.kt)("em",{parentName:"p"},"MLOps for ALL"),", we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. Therefore, detailed explanations of the TensorBoards page will be omitted in this context."))}p.isMDXComponent=!0},9268:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/left-tabs-4290df638df45a698ebc615a5bcb5b86.png"},5218:(e,t,r)=>{r.d(t,{Z:()=>o});const o=r.p+"assets/images/tensorboard-ec19f59c613e94e6b1ba7759e853f4ed.png"}}]); \ No newline at end of file diff --git a/en/assets/js/f2563ea8.bb4530a6.js b/en/assets/js/f2563ea8.fa6b5001.js similarity index 99% rename from en/assets/js/f2563ea8.bb4530a6.js rename to en/assets/js/f2563ea8.fa6b5001.js index 6157ecb9..b8bc99a5 100644 --- a/en/assets/js/f2563ea8.bb4530a6.js +++ b/en/assets/js/f2563ea8.fa6b5001.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1607],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=l.createContext({}),m=function(e){var n=l.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=m(e.components);return l.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),d=m(t),u=a,f=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[d]="string"==typeof e?e:a,r[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"api-deployment/seldon-mlflow",title:"5. 
Model from MLflow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/en/docs/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/en/docs/api-deployment/seldon-fields"},next:{title:"6. Multi Models",permalink:"/en/docs/api-deployment/seldon-children"}},s={},m=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API Creation",id:"api-creation",level:2}],p={toc:m},d="wrapper";function c(e){let{components:n,...o}=e;return(0,a.kt)(d,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"On this page, we will learn how to create an API using a model saved in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-mlflow"},"MLflow Component"),"."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"The initContainer needs credentials to access minio and download the model. The credentials for access to minio are as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,"The input value for ",(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," is ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),". However, since the input value for the secret must be an encoded value, the value that is actually entered must be the value that comes out after performing the following. 
"),(0,a.kt)("p",null,"The values that need to be entered in data are as follows."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"The encoding can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"Then the following values will be output."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"If you do the encoding for the entire value, it will look like this:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000="},"http://minio-service.kubeflow.svc:9000=")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false=")),(0,a.kt)("p",null,"You can generate a yaml file through the following command to create the secret."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"Create the secret through the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"If performed normally, it will be output as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"Now let's write the yaml file to create Seldon Core."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"There are two major changes compared to the previously created 
",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-fields"},"Seldon Fields"),":"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"The ",(0,a.kt)("inlineCode",{parentName:"li"},"envFrom")," field is added to the initContainer."),(0,a.kt)("li",{parentName:"ol"},"The address in the args has been changed to ",(0,a.kt)("inlineCode",{parentName:"li"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"),".")),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?"),(0,a.kt)("p",null,"To find the path, go back to MLflow and click on the run, then click on the model, as shown below:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(7484).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"You can use the path obtained from there."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"This process involves providing the environment variables required to access MinIO and download the model. We will use the ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret")," created earlier."),(0,a.kt)("h2",{id:"api-creation"},"API Creation"),(0,a.kt)("p",null,"First, let's generate the YAML file based on the specification defined above."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"Create a seldon pod."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"If it is performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"Now we wait until the pod is up and running properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"If it is outputted similarly to the following, the API has been created normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"You can confirm the execution through the following request on the API created through the 
CLI."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"If executed normally, you can get the following results."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}c.isMDXComponent=!0},7484:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[1607],{3905:(e,n,t)=>{t.d(n,{Zo:()=>p,kt:()=>f});var l=t(7294);function a(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);n&&(l=l.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,l)}return t}function r(e){for(var n=1;n=0||(a[t]=e[t]);return a}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(a[t]=e[t])}return a}var s=l.createContext({}),m=function(e){var n=l.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):r(r({},n),e)),t},p=function(e){var n=m(e.components);return l.createElement(s.Provider,{value:n},e.children)},d="mdxType",c={inlineCode:"code",wrapper:function(e){var n=e.children;return l.createElement(l.Fragment,{},n)}},u=l.forwardRef((function(e,n){var t=e.components,a=e.mdxType,o=e.originalType,s=e.parentName,p=i(e,["components","mdxType","originalType","parentName"]),d=m(t),u=a,f=d["".concat(s,".").concat(u)]||d[u]||c[u]||o;return t?l.createElement(f,r(r({ref:n},p),{},{components:t})):l.createElement(f,r({ref:n},p))}));function f(e,n){var t=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var o=t.length,r=new Array(o);r[0]=u;var i={};for(var s in n)hasOwnProperty.call(n,s)&&(i[s]=n[s]);i.originalType=e,i[d]="string"==typeof e?e:a,r[1]=i;for(var m=2;m{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>r,default:()=>c,frontMatter:()=>o,metadata:()=>i,toc:()=>m});var l=t(7462),a=(t(7294),t(3905));const o={title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},r=void 0,i={unversionedId:"api-deployment/seldon-mlflow",id:"api-deployment/seldon-mlflow",title:"5. Model from MLflow",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/api-deployment/seldon-mlflow.md",sourceDirName:"api-deployment",slug:"/api-deployment/seldon-mlflow",permalink:"/en/docs/api-deployment/seldon-mlflow",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/api-deployment/seldon-mlflow.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:5,frontMatter:{title:"5. Model from MLflow",description:"",sidebar_position:5,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"4. Seldon Fields",permalink:"/en/docs/api-deployment/seldon-fields"},next:{title:"6. 
Multi Models",permalink:"/en/docs/api-deployment/seldon-children"}},s={},m=[{value:"Model from MLflow",id:"model-from-mlflow",level:2},{value:"Secret",id:"secret",level:2},{value:"Seldon Core yaml",id:"seldon-core-yaml",level:2},{value:"args",id:"args",level:3},{value:"envFrom",id:"envfrom",level:3},{value:"API Creation",id:"api-creation",level:2}],p={toc:m},d="wrapper";function c(e){let{components:n,...o}=e;return(0,a.kt)(d,(0,l.Z)({},p,o,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"model-from-mlflow"},"Model from MLflow"),(0,a.kt)("p",null,"On this page, we will learn how to create an API using a model saved in the ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/kubeflow/advanced-mlflow"},"MLflow Component"),"."),(0,a.kt)("h2",{id:"secret"},"Secret"),(0,a.kt)("p",null,"The initContainer needs credentials to access minio and download the model. The credentials for access to minio are as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"apiVersion: v1\ntype: Opaque\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8K=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp\n USE_SSL: ZmFsc2U=\n")),(0,a.kt)("p",null,"The input value for ",(0,a.kt)("inlineCode",{parentName:"p"},"AWS_ACCESS_KEY_ID")," is ",(0,a.kt)("inlineCode",{parentName:"p"},"minio"),". However, since the input value for the secret must be an encoded value, the value that is actually entered must be the value that comes out after performing the following. "),(0,a.kt)("p",null,"The values that need to be entered in data are as follows."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio"),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123"),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000"},"http://minio-service.kubeflow.svc:9000")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false")),(0,a.kt)("p",null,"The encoding can be done using the following command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"echo -n minio | base64\n")),(0,a.kt)("p",null,"Then the following values will be output."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"bWluaW8=\n")),(0,a.kt)("p",null,"If you do the encoding for the entire value, it will look like this:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"AWS_ACCESS_KEY_ID: minio="),(0,a.kt)("li",{parentName:"ul"},"AWS_SECRET_ACCESS_KEY: minio123="),(0,a.kt)("li",{parentName:"ul"},"AWS_ENDPOINT_URL: ",(0,a.kt)("a",{parentName:"li",href:"http://minio-service.kubeflow.svc:9000="},"http://minio-service.kubeflow.svc:9000=")),(0,a.kt)("li",{parentName:"ul"},"USE_SSL: false=")),(0,a.kt)("p",null,"You can generate a yaml file through the following command to create the secret."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"cat < seldon-init-container-secret.yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: seldon-init-container-secret\n namespace: kubeflow-user-example-com\ntype: Opaque\ndata:\n AWS_ACCESS_KEY_ID: bWluaW8=\n AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=\n AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=\n USE_SSL: ZmFsc2U=\nEOF\n")),(0,a.kt)("p",null,"Create the secret through the following 
command."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-init-container-secret.yaml\n")),(0,a.kt)("p",null,"If performed normally, it will be output as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"secret/seldon-init-container-secret created\n")),(0,a.kt)("h2",{id:"seldon-core-yaml"},"Seldon Core yaml"),(0,a.kt)("p",null,"Now let's write the yaml file to create Seldon Core."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n children: []\n')),(0,a.kt)("p",null,"There are two major changes compared to the previously created ",(0,a.kt)("a",{parentName:"p",href:"/en/docs/api-deployment/seldon-fields"},"Seldon Fields"),":"),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"The ",(0,a.kt)("inlineCode",{parentName:"li"},"envFrom")," field is added to the initContainer."),(0,a.kt)("li",{parentName:"ol"},"The address in the args has been changed to ",(0,a.kt)("inlineCode",{parentName:"li"},"s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"),".")),(0,a.kt)("h3",{id:"args"},"args"),(0,a.kt)("p",null,"Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?"),(0,a.kt)("p",null,"To find the path, go back to MLflow and click on the run, then click on the model, as shown below:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"seldon-mlflow-0.png",src:t(7484).Z,width:"3466",height:"2274"})),(0,a.kt)("p",null,"You can use the path obtained from there."),(0,a.kt)("h3",{id:"envfrom"},"envFrom"),(0,a.kt)("p",null,"This process involves providing the environment variables required to access MinIO and download the model. 
We will use the ",(0,a.kt)("inlineCode",{parentName:"p"},"seldon-init-container-secret")," created earlier."),(0,a.kt)("h2",{id:"api-creation"},"API Creation"),(0,a.kt)("p",null,"First, let's generate the YAML file based on the specification defined above."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'apiVersion: machinelearning.seldon.io/v1\nkind: SeldonDeployment\nmetadata:\n name: seldon-example\n namespace: kubeflow-user-example-com\nspec:\n name: model\n predictors:\n - name: model\n\n componentSpecs:\n - spec:\n volumes:\n - name: model-provision-location\n emptyDir: {}\n\n initContainers:\n - name: model-initializer\n image: gcr.io/kfserving/storage-initializer:v0.4.0\n args:\n - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"\n - "/mnt/models"\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n envFrom:\n - secretRef:\n name: seldon-init-container-secret\n\n containers:\n - name: model\n image: ghcr.io/mlops-for-all/mlflowserver\n volumeMounts:\n - mountPath: /mnt/models\n name: model-provision-location\n readOnly: true\n securityContext:\n privileged: true\n runAsUser: 0\n runAsGroup: 0\n\n graph:\n name: model\n type: MODEL\n parameters:\n - name: model_uri\n type: STRING\n value: "/mnt/models"\n - name: xtype\n type: STRING\n value: "dataframe"\n children: []\nEOF\n')),(0,a.kt)("p",null,"Create a seldon pod."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl apply -f seldon-mlflow.yaml\n\n")),(0,a.kt)("p",null,"If it is performed normally, it will be outputted as follows."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldondeployment.machinelearning.seldon.io/seldon-example created\n")),(0,a.kt)("p",null,"Now we wait until the pod is up and running properly."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl get po -n kubeflow-user-example-com | grep seldon\n")),(0,a.kt)("p",null,"If it is outputted similarly to the following, the API has been created normally."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},"seldon-example-model-0-model-5c949bd894-c5f28 3/3 Running 0 69s\n")),(0,a.kt)("p",null,"You can confirm the execution through the following request on the API created through the CLI."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \\\n-H \'Content-Type: application/json\' \\\n-d \'{\n "data": {\n "ndarray": [\n [\n 143.0,\n 0.0,\n 30.0,\n 30.0\n ]\n ],\n "names": [\n "sepal length (cm)",\n "sepal width (cm)",\n "petal length (cm)",\n "petal width (cm)"\n ]\n }\n}\'\n')),(0,a.kt)("p",null,"If executed normally, you can get the following results."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-bash"},'{"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}\n')))}c.isMDXComponent=!0},7484:(e,n,t)=>{t.d(n,{Z:()=>l});const l=t.p+"assets/images/seldon-mlflow-0-1d29992e36aa6ee88621e221794159d1.png"}}]); \ No newline at end of file diff --git a/en/assets/js/f748dfb1.0d39b6f2.js b/en/assets/js/f748dfb1.c7fdde06.js similarity index 99% rename from en/assets/js/f748dfb1.0d39b6f2.js rename to en/assets/js/f748dfb1.c7fdde06.js index 6a69ccf4..320f4cd6 100644 --- a/en/assets/js/f748dfb1.0d39b6f2.js +++ 
b/en/assets/js/f748dfb1.c7fdde06.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7376],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>u});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),d=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return a.createElement(l.Provider,{value:t},e.children)},c="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=d(n),m=o,u=c["".concat(l,".").concat(m)]||c[m]||h[m]||i;return n?a.createElement(u,r(r({ref:t},p),{},{components:n})):a.createElement(u,r({ref:t},p))}));function u(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>s,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/intro",id:"introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/en/docs/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. 
Levels of MLOps",permalink:"/en/docs/introduction/levels"}},l={},d=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML + Ops",id:"1-ml--ops",level:3},{value:"Rule-Based Approach",id:"rule-based-approach",level:4},{value:"Machine Learning Approach",id:"machine-learning-approach",level:4},{value:"Deep Learning Approach",id:"deep-learning-approach",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) Conclusion",id:"3-conclusion",level:3}],p={toc:d},c="wrapper";function h(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,a.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,o.kt)("p",null,"Since 2012, when Alexnet was introduced, Machine Learning and Deep Learning have been introduced in any domain where data exists, such as Computer Vision and Natural Language Processing. Deep Learning and Machine Learning were referred to collectively as AI, and the need for AI was shouted from many media. And many companies conducted numerous projects using Machine Learning and Deep Learning. But what was the result? Byungchan Eum, the Head of North East Asia at Element AI, said \u201cIf 10 companies start an AI project, 9 of them will only be able to do concept validation (POC)\u201d."),(0,o.kt)("p",null,"In this way, in many projects, Machine Learning and Deep Learning only showed the possibility that they could solve this problem and then disappeared. And around this time, the outlook that ",(0,o.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI Winter was coming again")," also began to emerge."),(0,o.kt)("p",null,"Why did most projects end at the concept validation (POC) stage? Because it is impossible to operate an actual service with only Machine Learning and Deep Learning code."),(0,o.kt)("p",null,"At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper ",(0,o.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),". However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention. "),(0,o.kt)("p",null,"And after a few years, machine learning and deep learning had proven their potential and people were now looking to apply it to actual services. However, soon many people realized that actual services were not as easy as they thought."),(0,o.kt)("h2",{id:"devops"},"Devops"),(0,o.kt)("p",null,"MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps."),(0,o.kt)("h3",{id:"devops-1"},"DevOps"),(0,o.kt)("p",null,'DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. 
It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.'),(0,o.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,o.kt)("p",null,"Let's explore why DevOps is necessary through a simple scenario."),(0,o.kt)("p",null,"In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service."),(0,o.kt)("p",null,"However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team."),(0,o.kt)("p",null,"When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"silo",src:n(3871).Z,width:"892",height:"498"})),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},'Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. Silos are designed to keep the stored materials separate and prevent them from mixing.\nIn the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.')),(0,o.kt)("p",null,"The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations."),(0,o.kt)("h3",{id:"cicd"},"CI/CD"),(0,o.kt)("p",null,"Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cicd",src:n(9187).Z,width:"1400",height:"299"})),(0,o.kt)("p",null,"Through this method, the development team can understand the operational environment and check whether the features being developed can be seamlessly deployed. 
The operations team can deploy validated features or improved products more often to increase customer product experience. In summary, DevOps is a methodology to solve the problem between development teams and operations teams."),(0,o.kt)("h2",{id:"mlops"},"MLOps"),(0,o.kt)("h3",{id:"1-ml--ops"},"1) ML + Ops"),(0,o.kt)("p",null,"DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience."),(0,o.kt)("p",null,'MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.'),(0,o.kt)("p",null,"MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system."),(0,o.kt)("h4",{id:"rule-based-approach"},"Rule-Based Approach"),(0,o.kt)("p",null,"In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification."),(0,o.kt)("h4",{id:"machine-learning-approach"},"Machine Learning Approach"),(0,o.kt)("p",null,"As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. In this case, the models are periodically retrained and redeployed."),(0,o.kt)("h4",{id:"deep-learning-approach"},"Deep Learning Approach"),(0,o.kt)("p",null,"When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed."),(0,o.kt)("p",null,"By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"graph",src:n(3278).Z,width:"752",height:"582"})),(0,o.kt)("p",null,"If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend where the model performance improves as the complexity increases. This often leads to the emergence of separate machine learning teams specializing in transitioning from traditional machine learning to deep learning."),(0,o.kt)("p",null,"If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. 
However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge."),(0,o.kt)("p",null,"Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively."),(0,o.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,o.kt)("p",null,"However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects."),(0,o.kt)("p",null,"In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary."),(0,o.kt)("h3",{id:"3-conclusion"},"3) Conclusion"),(0,o.kt)("p",null,"In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models."))}h.isMDXComponent=!0},9187:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},3278:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},3871:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[7376],{3905:(e,t,n)=>{n.d(t,{Zo:()=>p,kt:()=>u});var a=n(7294);function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function r(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var l=a.createContext({}),d=function(e){var t=a.useContext(l),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},p=function(e){var t=d(e.components);return a.createElement(l.Provider,{value:t},e.children)},c="mdxType",h={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},m=a.forwardRef((function(e,t){var n=e.components,o=e.mdxType,i=e.originalType,l=e.parentName,p=s(e,["components","mdxType","originalType","parentName"]),c=d(n),m=o,u=c["".concat(l,".").concat(m)]||c[m]||h[m]||i;return n?a.createElement(u,r(r({ref:t},p),{},{components:n})):a.createElement(u,r({ref:t},p))}));function 
u(e,t){var n=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var i=n.length,r=new Array(i);r[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[c]="string"==typeof e?e:o,r[1]=s;for(var d=2;d{n.r(t),n.d(t,{assets:()=>l,contentTitle:()=>r,default:()=>h,frontMatter:()=>i,metadata:()=>s,toc:()=>d});var a=n(7462),o=(n(7294),n(3905));const i={title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:new Date("2022-03-05T00:00:00.000Z"),contributors:["Jongseob Jeon"]},r=void 0,s={unversionedId:"introduction/intro",id:"introduction/intro",title:"1. What is MLOps?",description:"Introduction to MLOps",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/introduction/intro.md",sourceDirName:"introduction",slug:"/introduction/intro",permalink:"/en/docs/introduction/intro",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/introduction/intro.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:1,frontMatter:{title:"1. What is MLOps?",description:"Introduction to MLOps",sidebar_position:1,date:'2021-1./img to MLOps"',lastmod:"2022-03-05T00:00:00.000Z",contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",next:{title:"2. Levels of MLOps",permalink:"/en/docs/introduction/levels"}},l={},d=[{value:"Machine Learning Project",id:"machine-learning-project",level:2},{value:"Devops",id:"devops",level:2},{value:"DevOps",id:"devops-1",level:3},{value:"Silo Effect",id:"silo-effect",level:3},{value:"CI/CD",id:"cicd",level:3},{value:"MLOps",id:"mlops",level:2},{value:"1) ML + Ops",id:"1-ml--ops",level:3},{value:"Rule-Based Approach",id:"rule-based-approach",level:4},{value:"Machine Learning Approach",id:"machine-learning-approach",level:4},{value:"Deep Learning Approach",id:"deep-learning-approach",level:4},{value:"2) ML -> Ops",id:"2-ml---ops",level:3},{value:"3) Conclusion",id:"3-conclusion",level:3}],p={toc:d},c="wrapper";function h(e){let{components:t,...i}=e;return(0,o.kt)(c,(0,a.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"machine-learning-project"},"Machine Learning Project"),(0,o.kt)("p",null,"Since 2012, when Alexnet was introduced, Machine Learning and Deep Learning have been introduced in any domain where data exists, such as Computer Vision and Natural Language Processing. Deep Learning and Machine Learning were referred to collectively as AI, and the need for AI was shouted from many media. And many companies conducted numerous projects using Machine Learning and Deep Learning. But what was the result? Byungchan Eum, the Head of North East Asia at Element AI, said \u201cIf 10 companies start an AI project, 9 of them will only be able to do concept validation (POC)\u201d."),(0,o.kt)("p",null,"In this way, in many projects, Machine Learning and Deep Learning only showed the possibility that they could solve this problem and then disappeared. And around this time, the outlook that ",(0,o.kt)("a",{parentName:"p",href:"https://www.aifutures.org/2021/ai-winter-is-coming/"},"AI Winter was coming again")," also began to emerge."),(0,o.kt)("p",null,"Why did most projects end at the concept validation (POC) stage? 
Because it is impossible to operate an actual service with only Machine Learning and Deep Learning code."),(0,o.kt)("p",null,"At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper ",(0,o.kt)("a",{parentName:"p",href:"https://proceedings.neurips.cc/paper/2015/file/86df7dcfd896fcaf2674f757a2463eba-Paper.pdf"},"Hidden Technical Debt in Machine Learning Systems"),". However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention. "),(0,o.kt)("p",null,"And after a few years, machine learning and deep learning had proven their potential and people were now looking to apply it to actual services. However, soon many people realized that actual services were not as easy as they thought."),(0,o.kt)("h2",{id:"devops"},"Devops"),(0,o.kt)("p",null,"MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps."),(0,o.kt)("h3",{id:"devops-1"},"DevOps"),(0,o.kt)("p",null,'DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.'),(0,o.kt)("h3",{id:"silo-effect"},"Silo Effect"),(0,o.kt)("p",null,"Let's explore why DevOps is necessary through a simple scenario."),(0,o.kt)("p",null,"In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service."),(0,o.kt)("p",null,"However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team."),(0,o.kt)("p",null,"When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"silo",src:n(3871).Z,width:"892",height:"498"})),(0,o.kt)("blockquote",null,(0,o.kt)("p",{parentName:"blockquote"},'Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. 
Silos are designed to keep the stored materials separate and prevent them from mixing.\nIn the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.')),(0,o.kt)("p",null,"The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations."),(0,o.kt)("h3",{id:"cicd"},"CI/CD"),(0,o.kt)("p",null,"Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"cicd",src:n(9187).Z,width:"1400",height:"299"})),(0,o.kt)("p",null,"Through this method, the development team can understand the operational environment and check whether the features being developed can be seamlessly deployed. The operations team can deploy validated features or improved products more often to increase customer product experience. In summary, DevOps is a methodology to solve the problem between development teams and operations teams."),(0,o.kt)("h2",{id:"mlops"},"MLOps"),(0,o.kt)("h3",{id:"1-ml--ops"},"1) ML + Ops"),(0,o.kt)("p",null,"DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience."),(0,o.kt)("p",null,'MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.'),(0,o.kt)("p",null,"MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system."),(0,o.kt)("h4",{id:"rule-based-approach"},"Rule-Based Approach"),(0,o.kt)("p",null,"In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification."),(0,o.kt)("h4",{id:"machine-learning-approach"},"Machine Learning Approach"),(0,o.kt)("p",null,"As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. 
In this case, the models are periodically retrained and redeployed."),(0,o.kt)("h4",{id:"deep-learning-approach"},"Deep Learning Approach"),(0,o.kt)("p",null,"When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed."),(0,o.kt)("p",null,"By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"graph",src:n(3278).Z,width:"752",height:"582"})),(0,o.kt)("p",null,"If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend where the model performance improves as the complexity increases. This often leads to the emergence of separate machine learning teams specializing in transitioning from traditional machine learning to deep learning."),(0,o.kt)("p",null,"If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge."),(0,o.kt)("p",null,"Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively."),(0,o.kt)("h3",{id:"2-ml---ops"},"2) ML -> Ops"),(0,o.kt)("p",null,"However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects."),(0,o.kt)("p",null,"In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary."),(0,o.kt)("h3",{id:"3-conclusion"},"3) Conclusion"),(0,o.kt)("p",null,"In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. 
On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models."))}h.isMDXComponent=!0},9187:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/cicd-775808741b1fa127eadb1fce55de3dab.png"},3278:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/graph-7329fb49fdf8c0b00d3c186386b5860e.png"},3871:(e,t,n)=>{n.d(t,{Z:()=>a});const a=n.p+"assets/images/silo-3cd9f9bdf17c846f82fd0dde78e01052.png"}}]); \ No newline at end of file diff --git a/en/assets/js/f7e73c15.4ed7714a.js b/en/assets/js/f7e73c15.b3b2513c.js similarity index 98% rename from en/assets/js/f7e73c15.4ed7714a.js rename to en/assets/js/f7e73c15.b3b2513c.js index a48fcdb1..90f3f178 100644 --- a/en/assets/js/f7e73c15.4ed7714a.js +++ b/en/assets/js/f7e73c15.b3b2513c.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2570],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=o.createContext({}),p=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},s=function(e){var t=p(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),d=p(n),f=a,m=d["".concat(c,".").concat(f)]||d[f]||u[f]||i;return n?o.createElement(m,r(r({ref:t},s),{},{components:n})):o.createElement(m,r({ref:t},s))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,r=new Array(i);r[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const i={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/kubeflow-concepts",id:"kubeflow/kubeflow-concepts",title:"2. Kubeflow Concepts",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-concepts.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/en/docs/kubeflow/kubeflow-intro"},next:{title:"3. 
Install Requirements",permalink:"/en/docs/kubeflow/basic-requirements"}},c={},p=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:p},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,o.Z)({},s,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"component"},"Component"),(0,a.kt)("p",null,"A component is composed of Component contents and a Component wrapper.\nA single component is delivered to Kubeflow through a Component wrapper and the delivered component executes the defined Component contents and produces artifacts."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-0.png",src:n(4032).Z,width:"1392",height:"704"})),(0,a.kt)("h3",{id:"component-contents"},"Component Contents"),(0,a.kt)("p",null,"There are three components that make up the component contents:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-1.png",src:n(9667).Z,width:"574",height:"436"})),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"Environment"),(0,a.kt)("li",{parentName:"ol"},"Python code w/ Config"),(0,a.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,a.kt)("p",null,"Let's explore each component with an example.\nHere is a Python code that loads data, trains an SVC (Support Vector Classifier) model, and saves the SVC model."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,a.kt)("p",null,"The above Python code can be divided into components contents as follows."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-2.png",src:n(5319).Z,width:"832",height:"410"})),(0,a.kt)("p",null,"Environment is the part of the Python code where the packages used in the code are imported.",(0,a.kt)("br",{parentName:"p"}),"\n","Next, Python Code w\\ Config is where the given Config is used to actually perform the training.",(0,a.kt)("br",{parentName:"p"}),"\n","Finally, there is a process to save the artifacts. "),(0,a.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,a.kt)("p",null,"Component wrappers deliver the necessary Config and execute tasks for component content."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-3.png",src:n(6617).Z,width:"1066",height:"766"})),(0,a.kt)("p",null,"In Kubeflow, component wrappers are defined as functions, similar to the ",(0,a.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv")," example above.\nWhen a component wrapper wraps the contents, it looks like the following:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-4.png",src:n(3954).Z,width:"464",height:"826"})),(0,a.kt)("h3",{id:"artifacts"},"Artifacts"),(0,a.kt)("p",null,"In the explanation above, it was mentioned that the component creates Artifacts. 
Artifacts is a term used to refer to any form of a file that is generated, such as evaluation results, logs, etc.\nOf the ones that we are interested in, the following are significant: Models, Data, Metrics, and etc."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-5.png",src:n(4445).Z,width:"1700",height:"454"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model"),(0,a.kt)("li",{parentName:"ul"},"Data"),(0,a.kt)("li",{parentName:"ul"},"Metric"),(0,a.kt)("li",{parentName:"ul"},"etc")),(0,a.kt)("h4",{id:"model"},"Model"),(0,a.kt)("p",null,"We defined the model as follows: "),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.")),(0,a.kt)("h4",{id:"data"},"Data"),(0,a.kt)("p",null,"Data includes preprocessed features, model predictions, etc. "),(0,a.kt)("h4",{id:"metric"},"Metric"),(0,a.kt)("p",null,"Metric is divided into two categories: dynamic metrics and static metrics."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch."),(0,a.kt)("li",{parentName:"ul"},"Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.")),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-6.png",src:n(6443).Z,width:"1696",height:"746"})),(0,a.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,a.kt)("p",null,"As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-7.png",src:n(7013).Z,width:"1810",height:"432"})),(0,a.kt)("h2",{id:"run"},"Run"),(0,a.kt)("p",null,'To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."'),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-8.png",src:n(6719).Z,width:"1810",height:"576"})),(0,a.kt)("p",null,"When a pipeline is executed, each component generates artifacts. 
Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-9.png",src:n(9382).Z,width:"1810",height:"592"})),(0,a.kt)("p",null,"Now, let's learn how to write components and pipelines."))}u.isMDXComponent=!0},4032:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},9667:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},5319:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},6617:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},3954:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},4445:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},6443:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},7013:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},6719:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},9382:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2570],{3905:(e,t,n)=>{n.d(t,{Zo:()=>s,kt:()=>m});var o=n(7294);function a(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);t&&(o=o.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,o)}return n}function r(e){for(var t=1;t=0||(a[n]=e[n]);return a}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(o=0;o=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(a[n]=e[n])}return a}var c=o.createContext({}),p=function(e){var t=o.useContext(c),n=t;return e&&(n="function"==typeof e?e(t):r(r({},t),e)),n},s=function(e){var t=p(e.components);return o.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return o.createElement(o.Fragment,{},t)}},f=o.forwardRef((function(e,t){var n=e.components,a=e.mdxType,i=e.originalType,c=e.parentName,s=l(e,["components","mdxType","originalType","parentName"]),d=p(n),f=a,m=d["".concat(c,".").concat(f)]||d[f]||u[f]||i;return n?o.createElement(m,r(r({ref:t},s),{},{components:n})):o.createElement(m,r({ref:t},s))}));function m(e,t){var n=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var i=n.length,r=new Array(i);r[0]=f;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var p=2;p{n.r(t),n.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>i,metadata:()=>l,toc:()=>p});var o=n(7462),a=(n(7294),n(3905));const i={title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},r=void 0,l={unversionedId:"kubeflow/kubeflow-concepts",id:"kubeflow/kubeflow-concepts",title:"2. 
Kubeflow Concepts",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/kubeflow-concepts.md",sourceDirName:"kubeflow",slug:"/kubeflow/kubeflow-concepts",permalink:"/en/docs/kubeflow/kubeflow-concepts",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/kubeflow-concepts.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:2,frontMatter:{title:"2. Kubeflow Concepts",description:"",sidebar_position:2,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"1. Kubeflow Introduction",permalink:"/en/docs/kubeflow/kubeflow-intro"},next:{title:"3. Install Requirements",permalink:"/en/docs/kubeflow/basic-requirements"}},c={},p=[{value:"Component",id:"component",level:2},{value:"Component Contents",id:"component-contents",level:3},{value:"Component Wrapper",id:"component-wrapper",level:3},{value:"Artifacts",id:"artifacts",level:3},{value:"Model",id:"model",level:4},{value:"Data",id:"data",level:4},{value:"Metric",id:"metric",level:4},{value:"Pipeline",id:"pipeline",level:2},{value:"Pipeline Config",id:"pipeline-config",level:3},{value:"Run",id:"run",level:2}],s={toc:p},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,o.Z)({},s,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h2",{id:"component"},"Component"),(0,a.kt)("p",null,"A component is composed of Component contents and a Component wrapper.\nA single component is delivered to Kubeflow through a Component wrapper and the delivered component executes the defined Component contents and produces artifacts."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-0.png",src:n(4032).Z,width:"1392",height:"704"})),(0,a.kt)("h3",{id:"component-contents"},"Component Contents"),(0,a.kt)("p",null,"There are three components that make up the component contents:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-1.png",src:n(9667).Z,width:"574",height:"436"})),(0,a.kt)("ol",null,(0,a.kt)("li",{parentName:"ol"},"Environment"),(0,a.kt)("li",{parentName:"ol"},"Python code w/ Config"),(0,a.kt)("li",{parentName:"ol"},"Generates Artifacts")),(0,a.kt)("p",null,"Let's explore each component with an example.\nHere is a Python code that loads data, trains an SVC (Support Vector Classifier) model, and saves the SVC model."),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-python"},'import dill\nimport pandas as pd\n\nfrom sklearn.svm import SVC\n\ntrain_data = pd.read_csv(train_data_path)\ntrain_target= pd.read_csv(train_target_path)\n\nclf= SVC(\n kernel=kernel\n)\nclf.fit(train_data)\n\nwith open(model_path, mode="wb") as file_writer:\n dill.dump(clf, file_writer)\n')),(0,a.kt)("p",null,"The above Python code can be divided into components contents as follows."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-2.png",src:n(5319).Z,width:"832",height:"410"})),(0,a.kt)("p",null,"Environment is the part of the Python code where the packages used in the code are imported.",(0,a.kt)("br",{parentName:"p"}),"\n","Next, Python Code w\\ Config is where the given Config is used to actually perform the training.",(0,a.kt)("br",{parentName:"p"}),"\n","Finally, there is a process to save the artifacts. 
"),(0,a.kt)("h3",{id:"component-wrapper"},"Component Wrapper"),(0,a.kt)("p",null,"Component wrappers deliver the necessary Config and execute tasks for component content."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-3.png",src:n(6617).Z,width:"1066",height:"766"})),(0,a.kt)("p",null,"In Kubeflow, component wrappers are defined as functions, similar to the ",(0,a.kt)("inlineCode",{parentName:"p"},"train_svc_from_csv")," example above.\nWhen a component wrapper wraps the contents, it looks like the following:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-4.png",src:n(3954).Z,width:"464",height:"826"})),(0,a.kt)("h3",{id:"artifacts"},"Artifacts"),(0,a.kt)("p",null,"In the explanation above, it was mentioned that the component creates Artifacts. Artifacts is a term used to refer to any form of a file that is generated, such as evaluation results, logs, etc.\nOf the ones that we are interested in, the following are significant: Models, Data, Metrics, and etc."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-5.png",src:n(4445).Z,width:"1700",height:"454"})),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Model"),(0,a.kt)("li",{parentName:"ul"},"Data"),(0,a.kt)("li",{parentName:"ul"},"Metric"),(0,a.kt)("li",{parentName:"ul"},"etc")),(0,a.kt)("h4",{id:"model"},"Model"),(0,a.kt)("p",null,"We defined the model as follows: "),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.")),(0,a.kt)("h4",{id:"data"},"Data"),(0,a.kt)("p",null,"Data includes preprocessed features, model predictions, etc. "),(0,a.kt)("h4",{id:"metric"},"Metric"),(0,a.kt)("p",null,"Metric is divided into two categories: dynamic metrics and static metrics."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch."),(0,a.kt)("li",{parentName:"ul"},"Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.")),(0,a.kt)("h2",{id:"pipeline"},"Pipeline"),(0,a.kt)("p",null,"A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-6.png",src:n(6443).Z,width:"1696",height:"746"})),(0,a.kt)("h3",{id:"pipeline-config"},"Pipeline Config"),(0,a.kt)("p",null,"As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-7.png",src:n(7013).Z,width:"1810",height:"432"})),(0,a.kt)("h2",{id:"run"},"Run"),(0,a.kt)("p",null,'To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."'),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-8.png",src:n(6719).Z,width:"1810",height:"576"})),(0,a.kt)("p",null,"When a pipeline is executed, each component generates artifacts. 
Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored."),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"concept-9.png",src:n(9382).Z,width:"1810",height:"592"})),(0,a.kt)("p",null,"Now, let's learn how to write components and pipelines."))}u.isMDXComponent=!0},4032:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-0-c3636a3fe20bb4a74d64d8565b4a51d9.png"},9667:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-1-887ac07d1b11b84ee3fc5d7b882ad4bc.png"},5319:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-2-00e4917a1ec11cff7fc7a3b00c75a9e9.png"},6617:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-3-0916d8982b42a638e986fd955f4b5fd0.png"},3954:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-4-3e6a8ee159e889b5e1bffc58dbb24b85.png"},4445:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-5-31eb60d97518af020d18d30e3b5c5d16.png"},6443:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-6-db0ab4d56f11dcad062bb89374f7ff5b.png"},7013:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-7-277a9b30da3a2fc3519d3453964c5d52.png"},6719:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-8-2350dff71d7f031b8cce3b73f8fd4381.png"},9382:(e,t,n)=>{n.d(t,{Z:()=>o});const o=n.p+"assets/images/concept-9-f366186846ec1d019b742bf478928f80.png"}}]); \ No newline at end of file diff --git a/en/assets/js/fac3f613.145439b5.js b/en/assets/js/fac3f613.7fdb1be1.js similarity index 98% rename from en/assets/js/fac3f613.145439b5.js rename to en/assets/js/fac3f613.7fdb1be1.js index a6a87842..3b902953 100644 --- a/en/assets/js/fac3f613.145439b5.js +++ b/en/assets/js/fac3f613.7fdb1be1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2851],{3905:(e,i,t)=>{t.d(i,{Zo:()=>c,kt:()=>b});var n=t(7294);function l(e,i,t){return i in e?Object.defineProperty(e,i,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[i]=t,e}function p(e,i){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);i&&(n=n.filter((function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable}))),t.push.apply(t,n)}return t}function a(e){for(var i=1;i=0||(l[t]=e[t]);return l}(e,i);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var r=n.createContext({}),s=function(e){var i=n.useContext(r),t=i;return e&&(t="function"==typeof e?e(i):a(a({},i),e)),t},c=function(e){var i=s(e.components);return n.createElement(r.Provider,{value:i},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var i=e.children;return n.createElement(n.Fragment,{},i)}},g=n.forwardRef((function(e,i){var t=e.components,l=e.mdxType,p=e.originalType,r=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=s(t),g=l,b=u["".concat(r,".").concat(g)]||u[g]||d[g]||p;return t?n.createElement(b,a(a({ref:i},c),{},{components:t})):n.createElement(b,a({ref:i},c))}));function b(e,i){var t=arguments,l=i&&i.mdxType;if("string"==typeof e||l){var p=t.length,a=new Array(p);a[0]=g;var o={};for(var r in i)hasOwnProperty.call(i,r)&&(o[r]=i[r]);o.originalType=e,o[u]="string"==typeof e?e:l,a[1]=o;for(var s=2;s{t.r(i),t.d(i,{assets:()=>r,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>s});var n=t(7462),l=(t(7294),t(3905));const p={title:"6. 
Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,o={unversionedId:"kubeflow/basic-pipeline-upload",id:"kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline-upload.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/en/docs/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/en/docs/kubeflow/basic-run"}},r={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Click Pipelines Tab",id:"1-click-pipelines-tab",level:3},{value:"2. Click Upload Pipeline",id:"2-click-upload-pipeline",level:3},{value:"3. Click Choose file",id:"3-click-choose-file",level:3},{value:"4. Upload created yaml file",id:"4-upload-created-yaml-file",level:3},{value:"5. Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:i,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:i,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"Now, let's upload the pipeline we created directly to kubeflow.",(0,l.kt)("br",{parentName:"p"}),"\n","Pipeline uploads can be done through the kubeflow dashboard UI.\nUse the method used in ",(0,l.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-kf"},"Install Kubeflow")," to do port forwarding."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,"Access ",(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to open the dashboard."),(0,l.kt)("h3",{id:"1-click-pipelines-tab"},"1. Click Pipelines Tab"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:t(7411).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-click-upload-pipeline"},"2. Click Upload Pipeline"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:t(5357).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-click-choose-file"},"3. Click Choose file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:t(8569).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-upload-created-yaml-file"},"4. Upload created yaml file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:t(2502).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(2812).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"The uploaded pipeline allows you to manage versions through uploads. 
However, it serves the role of gathering pipelines with the same name rather than version management at the code level, such as Github.\nIn the example above, clicking on example_pipeline will bring up the following screen."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:t(9986).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click this screen shows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(2812).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click Upload Version, a screen appears where you can upload the pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:t(9495).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Now, upload your pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:t(5447).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Once uploaded, you can check the pipeline version as follows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:t(4931).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},7411:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},5357:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},8569:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},2502:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},2812:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},9986:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},9495:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},5447:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},4931:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[2851],{3905:(e,i,t)=>{t.d(i,{Zo:()=>c,kt:()=>b});var n=t(7294);function l(e,i,t){return i in e?Object.defineProperty(e,i,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[i]=t,e}function p(e,i){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);i&&(n=n.filter((function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable}))),t.push.apply(t,n)}return t}function a(e){for(var i=1;i=0||(l[t]=e[t]);return l}(e,i);if(Object.getOwnPropertySymbols){var p=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var r=n.createContext({}),s=function(e){var i=n.useContext(r),t=i;return e&&(t="function"==typeof e?e(i):a(a({},i),e)),t},c=function(e){var i=s(e.components);return n.createElement(r.Provider,{value:i},e.children)},u="mdxType",d={inlineCode:"code",wrapper:function(e){var i=e.children;return n.createElement(n.Fragment,{},i)}},g=n.forwardRef((function(e,i){var t=e.components,l=e.mdxType,p=e.originalType,r=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=s(t),g=l,b=u["".concat(r,".").concat(g)]||u[g]||d[g]||p;return t?n.createElement(b,a(a({ref:i},c),{},{components:t})):n.createElement(b,a({ref:i},c))}));function b(e,i){var t=arguments,l=i&&i.mdxType;if("string"==typeof e||l){var p=t.length,a=new Array(p);a[0]=g;var o={};for(var r in 
i)hasOwnProperty.call(i,r)&&(o[r]=i[r]);o.originalType=e,o[u]="string"==typeof e?e:l,a[1]=o;for(var s=2;s{t.r(i),t.d(i,{assets:()=>r,contentTitle:()=>a,default:()=>d,frontMatter:()=>p,metadata:()=>o,toc:()=>s});var n=t(7462),l=(t(7294),t(3905));const p={title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},a=void 0,o={unversionedId:"kubeflow/basic-pipeline-upload",id:"kubeflow/basic-pipeline-upload",title:"6. Pipeline - Upload",description:"",source:"@site/i18n/en/docusaurus-plugin-content-docs/current/kubeflow/basic-pipeline-upload.md",sourceDirName:"kubeflow",slug:"/kubeflow/basic-pipeline-upload",permalink:"/en/docs/kubeflow/basic-pipeline-upload",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/docs/kubeflow/basic-pipeline-upload.md",tags:[],version:"current",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:6,frontMatter:{title:"6. Pipeline - Upload",description:"",sidebar_position:6,contributors:["Jongseob Jeon"]},sidebar:"tutorialSidebar",previous:{title:"5. Pipeline - Write",permalink:"/en/docs/kubeflow/basic-pipeline"},next:{title:"7. Pipeline - Run",permalink:"/en/docs/kubeflow/basic-run"}},r={},s=[{value:"Upload Pipeline",id:"upload-pipeline",level:2},{value:"1. Click Pipelines Tab",id:"1-click-pipelines-tab",level:3},{value:"2. Click Upload Pipeline",id:"2-click-upload-pipeline",level:3},{value:"3. Click Choose file",id:"3-click-choose-file",level:3},{value:"4. Upload created yaml file",id:"4-upload-created-yaml-file",level:3},{value:"5. Create",id:"5-create",level:3},{value:"Upload Pipeline Version",id:"upload-pipeline-version",level:2}],c={toc:s},u="wrapper";function d(e){let{components:i,...p}=e;return(0,l.kt)(u,(0,n.Z)({},c,p,{components:i,mdxType:"MDXLayout"}),(0,l.kt)("h2",{id:"upload-pipeline"},"Upload Pipeline"),(0,l.kt)("p",null,"Now, let's upload the pipeline we created directly to kubeflow.",(0,l.kt)("br",{parentName:"p"}),"\n","Pipeline uploads can be done through the kubeflow dashboard UI.\nUse the method used in ",(0,l.kt)("a",{parentName:"p",href:"/en/docs/setup-components/install-components-kf"},"Install Kubeflow")," to do port forwarding."),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-bash"},"kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80\n")),(0,l.kt)("p",null,"Access ",(0,l.kt)("a",{parentName:"p",href:"http://localhost:8080"},"http://localhost:8080")," to open the dashboard."),(0,l.kt)("h3",{id:"1-click-pipelines-tab"},"1. Click Pipelines Tab"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-0.png",src:t(7411).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"2-click-upload-pipeline"},"2. Click Upload Pipeline"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-1.png",src:t(5357).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"3-click-choose-file"},"3. Click Choose file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-2.png",src:t(8569).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"4-upload-created-yaml-file"},"4. Upload created yaml file"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-3.png",src:t(2502).Z,width:"3360",height:"2100"})),(0,l.kt)("h3",{id:"5-create"},"5. Create"),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(2812).Z,width:"3360",height:"2100"})),(0,l.kt)("h2",{id:"upload-pipeline-version"},"Upload Pipeline Version"),(0,l.kt)("p",null,"The uploaded pipeline allows you to manage versions through uploads. 
However, it serves the role of gathering pipelines with the same name rather than version management at the code level, such as Github.\nIn the example above, clicking on example_pipeline will bring up the following screen."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-5.png",src:t(9986).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click this screen shows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-4.png",src:t(2812).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"If you click Upload Version, a screen appears where you can upload the pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-6.png",src:t(9495).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Now, upload your pipeline."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-7.png",src:t(5447).Z,width:"3360",height:"2100"})),(0,l.kt)("p",null,"Once uploaded, you can check the pipeline version as follows."),(0,l.kt)("p",null,(0,l.kt)("img",{alt:"pipeline-gui-8.png",src:t(4931).Z,width:"3360",height:"2100"})))}d.isMDXComponent=!0},7411:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-0-f7b76be96957b718745ed2097584c522.png"},5357:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-1-db1f71e3803fa7f7864928391e5b515e.png"},8569:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-2-3ebafe6d26ce8382bed6c39fdb949ffc.png"},2502:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-3-39b0f036fc76c0832ea02dc835db627a.png"},2812:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-4-c6013b589b7ab9ec9b83fbbb68f41b2d.png"},9986:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-5-0b90b4869ebaf0654826f5763609e34a.png"},9495:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-6-2a94de3824c6e38732d1d18ecb4b7d10.png"},5447:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-7-af0c439edb4ba0f0b7d7e11488d9c971.png"},4931:(e,i,t)=>{t.d(i,{Z:()=>n});const n=t.p+"assets/images/pipeline-gui-8-2aecbdbeaa0c064cb224d77c268717ca.png"}}]); \ No newline at end of file diff --git a/en/assets/js/fe186c37.64f45682.js b/en/assets/js/fe186c37.6da54db6.js similarity index 98% rename from en/assets/js/fe186c37.64f45682.js rename to en/assets/js/fe186c37.6da54db6.js index 34c89cd3..164a4b2c 100644 --- a/en/assets/js/fe186c37.64f45682.js +++ b/en/assets/js/fe186c37.6da54db6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5839],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>k});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var 
r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=c(r),m=o,k=p["".concat(l,".").concat(m)]||p[m]||d[m]||a;return r?n.createElement(k,i(i({ref:t},u),{},{components:r})):n.createElement(k,i({ref:t},u))}));function k(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/docker",id:"version-1.0/prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/en/docs/1.0/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/docker.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1689038498,formattedLastUpdatedAt:"Jul 11, 2023",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/1.0/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/en/docs/1.0/prerequisites/docker/command"}},l={},c=[{value:"Container",id:"container",level:2},{value:"Docker",id:"docker",level:2},{value:"Interpretation of Layer",id:"interpretation-of-layer",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:c},p="wrapper";function d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Containerization:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A technology that allows applications to be executed uniformly anywhere."))),(0,o.kt)("li",{parentName:"ul"},"Container Image:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A collection of all the files required to run an application."),(0,o.kt)("li",{parentName:"ul"},"\u2192 Similar to a mold for making fish-shaped bread (Bungeoppang)."))),(0,o.kt)("li",{parentName:"ul"},"Container:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A single process that is executed based on a container image."),(0,o.kt)("li",{parentName:"ul"},"\u2192 A fish-shaped bread (Bungeoppang) produced using a mold.")))),(0,o.kt)("h2",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker is a platform that allows you to manage and use containers.",(0,o.kt)("br",{parentName:"p"}),"\n",'Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.'),(0,o.kt)("p",null,"In the Docker, the resources for the container are separated and the lifecycle is controlled by Linux kernel's cgroups, etc.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it is too difficult to use these interfaces directly, so an abstraction layer is 
created."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(2297).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"Through this, users can easily control containers with just the user-friendly API ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Users can easily control containers using the user-friendly API called ",(0,o.kt)("strong",{parentName:"li"},"Docker CLI"),".")),(0,o.kt)("h2",{id:"interpretation-of-layer"},"Interpretation of Layer"),(0,o.kt)("p",null,"The roles of the layers mentioned above are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process."),(0,o.kt)("li",{parentName:"ol"},"containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI)."),(0,o.kt)("li",{parentName:"ol"},"dockerd: Solely responsible for issuing commands to containerd."),(0,o.kt)("li",{parentName:"ol"},"Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},'During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.')))),(0,o.kt)("p",null,'Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.',(0,o.kt)("br",{parentName:"p"}),"\n",'In the upcoming text, the term "Docker" may be used in various contexts.'),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"ML engineers use Docker for the following reasons:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python packages."),(0,o.kt)("li",{parentName:"ol"},"Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. 
Containerization technology enables this."),(0,o.kt)("li",{parentName:"ol"},"Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.")))}d.isMDXComponent=!0},2297:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkv_2=self.webpackChunkv_2||[]).push([[5839],{3905:(e,t,r)=>{r.d(t,{Zo:()=>u,kt:()=>k});var n=r(7294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=n.createContext({}),c=function(e){var t=n.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=c(e.components);return n.createElement(l.Provider,{value:t},e.children)},p="mdxType",d={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,l=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),p=c(r),m=o,k=p["".concat(l,".").concat(m)]||p[m]||d[m]||a;return r?n.createElement(k,i(i({ref:t},u),{},{components:r})):n.createElement(k,i({ref:t},u))}));function k(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=m;var s={};for(var l in t)hasOwnProperty.call(t,l)&&(s[l]=t[l]);s.originalType=e,s[p]="string"==typeof e?e:o,i[1]=s;for(var c=2;c{r.r(t),r.d(t,{assets:()=>l,contentTitle:()=>i,default:()=>d,frontMatter:()=>a,metadata:()=>s,toc:()=>c});var n=r(7462),o=(r(7294),r(3905));const a={title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},i=void 0,s={unversionedId:"prerequisites/docker/docker",id:"version-1.0/prerequisites/docker/docker",title:"What is Docker?",description:"Introduction to Docker.",source:"@site/i18n/en/docusaurus-plugin-content-docs/version-1.0/prerequisites/docker/docker.md",sourceDirName:"prerequisites/docker",slug:"/prerequisites/docker/",permalink:"/en/docs/1.0/prerequisites/docker/",draft:!1,editUrl:"https://github.com/mlops-for-all/mlops-for-all.github.io/tree/main/versioned_docs/version-1.0/prerequisites/docker/docker.md",tags:[],version:"1.0",lastUpdatedBy:"Aiden-Jeon",lastUpdatedAt:1692331370,formattedLastUpdatedAt:"Aug 18, 2023",sidebarPosition:3,frontMatter:{title:"What is Docker?",description:"Introduction to Docker.",sidebar_position:3,contributors:["Jongseob Jeon","Jaeyeon Kim"]},sidebar:"preSidebar",previous:{title:"Why Docker & Kubernetes ?",permalink:"/en/docs/1.0/prerequisites/docker/introduction"},next:{title:"[Practice] Docker command",permalink:"/en/docs/1.0/prerequisites/docker/command"}},l={},c=[{value:"Container",id:"container",level:2},{value:"Docker",id:"docker",level:2},{value:"Interpretation of Layer",id:"interpretation-of-layer",level:2},{value:"For ML Engineer",id:"for-ml-engineer",level:2}],u={toc:c},p="wrapper";function 
d(e){let{components:t,...a}=e;return(0,o.kt)(p,(0,n.Z)({},u,a,{components:t,mdxType:"MDXLayout"}),(0,o.kt)("h2",{id:"container"},"Container"),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Containerization:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A technology that allows applications to be executed uniformly anywhere."))),(0,o.kt)("li",{parentName:"ul"},"Container Image:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A collection of all the files required to run an application."),(0,o.kt)("li",{parentName:"ul"},"\u2192 Similar to a mold for making fish-shaped bread (Bungeoppang)."))),(0,o.kt)("li",{parentName:"ul"},"Container:",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},"A single process that is executed based on a container image."),(0,o.kt)("li",{parentName:"ul"},"\u2192 A fish-shaped bread (Bungeoppang) produced using a mold.")))),(0,o.kt)("h2",{id:"docker"},"Docker"),(0,o.kt)("p",null,"Docker is a platform that allows you to manage and use containers.",(0,o.kt)("br",{parentName:"p"}),"\n",'Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.'),(0,o.kt)("p",null,"In the Docker, the resources for the container are separated and the lifecycle is controlled by Linux kernel's cgroups, etc.",(0,o.kt)("br",{parentName:"p"}),"\n","However, it is too difficult to use these interfaces directly, so an abstraction layer is created."),(0,o.kt)("p",null,(0,o.kt)("img",{alt:"docker-layer.png",src:r(2297).Z,width:"574",height:"455"})),(0,o.kt)("p",null,"Through this, users can easily control containers with just the user-friendly API ",(0,o.kt)("strong",{parentName:"p"},"Docker CLI"),"."),(0,o.kt)("ul",null,(0,o.kt)("li",{parentName:"ul"},"Users can easily control containers using the user-friendly API called ",(0,o.kt)("strong",{parentName:"li"},"Docker CLI"),".")),(0,o.kt)("h2",{id:"interpretation-of-layer"},"Interpretation of Layer"),(0,o.kt)("p",null,"The roles of the layers mentioned above are as follows:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process."),(0,o.kt)("li",{parentName:"ol"},"containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI)."),(0,o.kt)("li",{parentName:"ol"},"dockerd: Solely responsible for issuing commands to containerd."),(0,o.kt)("li",{parentName:"ol"},"Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.",(0,o.kt)("ul",{parentName:"li"},(0,o.kt)("li",{parentName:"ul"},'During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.')))),(0,o.kt)("p",null,'Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.',(0,o.kt)("br",{parentName:"p"}),"\n",'In the upcoming text, the term "Docker" may be used in various contexts.'),(0,o.kt)("h2",{id:"for-ml-engineer"},"For ML Engineer"),(0,o.kt)("p",null,"ML engineers use Docker for the following reasons:"),(0,o.kt)("ol",null,(0,o.kt)("li",{parentName:"ol"},"ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python 
packages."),(0,o.kt)("li",{parentName:"ol"},"Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. Containerization technology enables this."),(0,o.kt)("li",{parentName:"ol"},"Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.")))}d.isMDXComponent=!0},2297:(e,t,r)=>{r.d(t,{Z:()=>n});const n=r.p+"assets/images/docker-layer-223ebf4a5bacfe912f92117606e17ac2.png"}}]); \ No newline at end of file diff --git a/en/assets/js/runtime~main.b3197c6c.js b/en/assets/js/runtime~main.2a52cdef.js similarity index 78% rename from en/assets/js/runtime~main.b3197c6c.js rename to en/assets/js/runtime~main.2a52cdef.js index 92db6cff..186067ef 100644 --- a/en/assets/js/runtime~main.b3197c6c.js +++ b/en/assets/js/runtime~main.2a52cdef.js @@ -1 +1 @@ -(()=>{"use strict";var e,a,d,f,c,b={},t={};function r(e){var a=t[e];if(void 0!==a)return a.exports;var d=t[e]={id:e,loaded:!1,exports:{}};return b[e].call(d.exports,d,d.exports,r),d.loaded=!0,d.exports}r.m=b,r.c=t,e=[],r.O=(a,d,f,c)=>{if(!d){var b=1/0;for(i=0;i=c)&&Object.keys(r.O).every((e=>r.O[e](d[o])))?d.splice(o--,1):(t=!1,c0&&e[i-1][2]>c;i--)e[i]=e[i-1];e[i]=[d,f,c]},r.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return r.d(a,{a:a}),a},d=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,r.t=function(e,f){if(1&f&&(e=this(e)),8&f)return e;if("object"==typeof e&&e){if(4&f&&e.__esModule)return e;if(16&f&&"function"==typeof e.then)return e}var c=Object.create(null);r.r(c);var b={};a=a||[null,d({}),d([]),d(d)];for(var t=2&f&&e;"object"==typeof t&&!~a.indexOf(t);t=d(t))Object.getOwnPropertyNames(t).forEach((a=>b[a]=()=>e[a]));return b.default=()=>e,r.d(c,b),c},r.d=(e,a)=>{for(var d in 
a)r.o(a,d)&&!r.o(e,d)&&Object.defineProperty(e,d,{enumerable:!0,get:a[d]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce(((a,d)=>(r.f[d](e,a),a)),[])),r.u=e=>"assets/js/"+({53:"935f2afb",451:"e415f9f6",463:"e68a1c9e",605:"4252e969",697:"311b36d9",924:"2842b95f",1011:"77df73f1",1032:"5d928751",1033:"ef82f9f8",1060:"0e729158",1229:"52a0bca6",1414:"92551a41",1512:"806f62a0",1607:"f2563ea8",1684:"30bad7fd",1714:"1f819a6a",1723:"e68086c7",1973:"cd8cc8f6",2011:"c0f17dd2",2032:"6a39bdb7",2052:"c83b8faa",2265:"56b79ddf",2300:"45ae3dfd",2461:"8db8515f",2513:"be794740",2515:"0096c9e8",2527:"bcbfd5bd",2570:"f7e73c15",2571:"d0c9c887",2657:"352e0155",2759:"02b9e606",2851:"fac3f613",2984:"4c6b0ea3",2996:"ae95ad8d",3085:"1f391b9e",3222:"2de7f827",3237:"1df93b7f",3552:"2e8b9598",3856:"b54de702",3940:"98e51aae",4051:"323a8b36",4095:"24605d3f",4141:"55e75476",4370:"e092da67",4447:"9bd4ad20",4510:"ea288814",4586:"74126281",4600:"d6cfd461",4604:"34be08f6",4660:"2b1aa4ae",4826:"64101c1c",4994:"593df1f8",5036:"404a71d4",5101:"5ccc0acb",5134:"bc53d220",5371:"1d540fc8",5424:"64f10cae",5430:"82f8e163",5497:"52a462e1",5520:"81a7ed24",5597:"a243e695",5696:"b108acf9",5829:"63323f2d",5839:"fe186c37",5867:"9c5e90dd",5878:"9c3963e5",5988:"b3d231d1",6005:"2b3f5e4d",6210:"656f3db8",6297:"b0207dc0",6490:"51a35976",6614:"a1ee4268",6628:"d93ec163",6680:"8657d6b7",6749:"615db352",6780:"d9ba8899",6863:"52b91c1d",6997:"2ded1a41",7005:"160bf777",7053:"6d1a6fc6",7189:"302c3da2",7298:"d9523fd4",7306:"3d1e1011",7376:"f748dfb1",7414:"393be207",7465:"99b17c27",7525:"74d04fec",7549:"2369f063",7558:"0ec0ba76",7616:"306a8c6c",7628:"afecfb43",7775:"11b44e77",7904:"4f70ae63",7918:"17896441",7964:"81a92311",7966:"85e11584",7977:"a6269ae6",7986:"1aa635cc",8225:"d7dc9408",8367:"d10c9a0a",8376:"cf42168c",8424:"1a5d547c",9010:"af806db3",9201:"8111fb61",9259:"ab4ab49e",9287:"20a999a7",9366:"8687dcee",9371:"89ac38ee",9512:"9a3eef67",9514:"1be78505",9680:"d1b5315b",9699:"81e9ac91",9800:"10a35dc9",9945:"607d38b2"}[e]||e)+"."+{53:"1bc18810",451:"ef0ab99f",463:"1459ba26",605:"98aeac05",697:"85a78d94",924:"9ae3b3ef",1011:"b5753fa9",1032:"1f66856a",1033:"46924cf3",1060:"e6781735",1229:"ea84c72f",1414:"85212c92",1512:"5e34d646",1607:"bb4530a6",1684:"969a1b95",1714:"96d064e5",1723:"51b60e7c",1973:"f9f78fcd",2011:"234971fa",2032:"454b7333",2052:"9ac2feda",2265:"ee67b1c4",2300:"4b89cb0e",2461:"1e663163",2513:"fb08cea8",2515:"9718f7f6",2527:"4f89a53d",2570:"4ed7714a",2571:"d875b8ef",2657:"480b3ee9",2759:"788d344d",2851:"145439b5",2984:"3235ee4b",2996:"e2d19c21",3085:"50df1a02",3222:"ede8fe61",3237:"c4fb40a2",3552:"284472ba",3856:"659327f3",3940:"9c1e81fb",4051:"9e40bb6c",4095:"66b3435d",4141:"228cf0b1",4370:"960eb7ac",4447:"b7b68fbf",4510:"9cec2943",4586:"013b4470",4600:"a151f46b",4604:"8130cb95",4660:"f8687437",4826:"6e4f5a73",4972:"9218459b",4994:"d96bde14",5036:"b23225a2",5101:"de35c7a6",5134:"5f9a90b4",5371:"639ff8f3",5424:"54e3ef79",5430:"96f11516",5497:"9f608ae6",5520:"b0469b80",5597:"1102efb3",5696:"81c43433",5829:"e0486a3a",5839:"64f45682",5867:"c2076166",5878:"1f415d9e",5988:"e9ff2ae9",6005:"a9ffc145",6210:"d9ffe346",6297:"8ddf0b5a",6490:"2ca09bec",6614:"4867f838",6628:"4f350bad",6680:"96da70e6",6749:"99b99ab3",6780:"c6bf45ce",6863:"00a55d30",6997:"470748c1",7005:"f9f374b7",7053:"2bbcd495",7189:"aaebe2da",7298:"de07ede6",7306:"1a6a061e",7376:"0d39b6f2",7414:"5ed6e6ec",7465:"f01f4e76",7525:"2f95ae86",7549:"92f4c018",7558:"529dc39b",7616:"c5173e72",7628:"c0aade7a",7775:"d3ff2fcd",7904:"a90f78eb",7918:"d8f6f22a",7964:"87cf079a",7966:"6de621bb",79
77:"d9fc6419",7986:"5ce763b5",8225:"cbf64d96",8367:"8a4b91cb",8376:"a95d6bd9",8424:"d3e9f87a",9010:"1f3ad8b3",9201:"d690c202",9259:"65fea98d",9287:"84992f27",9366:"2d3a81a1",9371:"474c210d",9455:"bfee0bcc",9512:"c5e96caf",9514:"f046b65b",9680:"ee12d5ba",9699:"74384f64",9800:"602a320d",9945:"e9895193"}[e]+".js",r.miniCssF=e=>{},r.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),r.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),f={},c="v-2:",r.l=(e,a,d,b)=>{if(f[e])f[e].push(a);else{var t,o;if(void 0!==d)for(var n=document.getElementsByTagName("script"),i=0;i{t.onerror=t.onload=null,clearTimeout(s);var c=f[e];if(delete f[e],t.parentNode&&t.parentNode.removeChild(t),c&&c.forEach((e=>e(d))),a)return a(d)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:t}),12e4);t.onerror=l.bind(null,t.onerror),t.onload=l.bind(null,t.onload),o&&document.head.appendChild(t)}},r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.p="/en/",r.gca=function(e){return e={17896441:"7918",74126281:"4586","935f2afb":"53",e415f9f6:"451",e68a1c9e:"463","4252e969":"605","311b36d9":"697","2842b95f":"924","77df73f1":"1011","5d928751":"1032",ef82f9f8:"1033","0e729158":"1060","52a0bca6":"1229","92551a41":"1414","806f62a0":"1512",f2563ea8:"1607","30bad7fd":"1684","1f819a6a":"1714",e68086c7:"1723",cd8cc8f6:"1973",c0f17dd2:"2011","6a39bdb7":"2032",c83b8faa:"2052","56b79ddf":"2265","45ae3dfd":"2300","8db8515f":"2461",be794740:"2513","0096c9e8":"2515",bcbfd5bd:"2527",f7e73c15:"2570",d0c9c887:"2571","352e0155":"2657","02b9e606":"2759",fac3f613:"2851","4c6b0ea3":"2984",ae95ad8d:"2996","1f391b9e":"3085","2de7f827":"3222","1df93b7f":"3237","2e8b9598":"3552",b54de702:"3856","98e51aae":"3940","323a8b36":"4051","24605d3f":"4095","55e75476":"4141",e092da67:"4370","9bd4ad20":"4447",ea288814:"4510",d6cfd461:"4600","34be08f6":"4604","2b1aa4ae":"4660","64101c1c":"4826","593df1f8":"4994","404a71d4":"5036","5ccc0acb":"5101",bc53d220:"5134","1d540fc8":"5371","64f10cae":"5424","82f8e163":"5430","52a462e1":"5497","81a7ed24":"5520",a243e695:"5597",b108acf9:"5696","63323f2d":"5829",fe186c37:"5839","9c5e90dd":"5867","9c3963e5":"5878",b3d231d1:"5988","2b3f5e4d":"6005","656f3db8":"6210",b0207dc0:"6297","51a35976":"6490",a1ee4268:"6614",d93ec163:"6628","8657d6b7":"6680","615db352":"6749",d9ba8899:"6780","52b91c1d":"6863","2ded1a41":"6997","160bf777":"7005","6d1a6fc6":"7053","302c3da2":"7189",d9523fd4:"7298","3d1e1011":"7306",f748dfb1:"7376","393be207":"7414","99b17c27":"7465","74d04fec":"7525","2369f063":"7549","0ec0ba76":"7558","306a8c6c":"7616",afecfb43:"7628","11b44e77":"7775","4f70ae63":"7904","81a92311":"7964","85e11584":"7966",a6269ae6:"7977","1aa635cc":"7986",d7dc9408:"8225",d10c9a0a:"8367",cf42168c:"8376","1a5d547c":"8424",af806db3:"9010","8111fb61":"9201",ab4ab49e:"9259","20a999a7":"9287","8687dcee":"9366","89ac38ee":"9371","9a3eef67":"9512","1be78505":"9514",d1b5315b:"9680","81e9ac91":"9699","10a35dc9":"9800","607d38b2":"9945"}[e]||e,r.p+r.u(e)},(()=>{var e={1303:0,532:0};r.f.j=(a,d)=>{var f=r.o(e,a)?e[a]:void 0;if(0!==f)if(f)d.push(f[2]);else if(/^(1303|532)$/.test(a))e[a]=0;else{var c=new Promise(((d,c)=>f=e[a]=[d,c]));d.push(f[2]=c);var b=r.p+r.u(a),t=new Error;r.l(b,(d=>{if(r.o(e,a)&&(0!==(f=e[a])&&(e[a]=void 0),f)){var c=d&&("load"===d.type?"missing":d.type),b=d&&d.target&&d.target.src;t.message="Loading 
chunk "+a+" failed.\n("+c+": "+b+")",t.name="ChunkLoadError",t.type=c,t.request=b,f[1](t)}}),"chunk-"+a,a)}},r.O.j=a=>0===e[a];var a=(a,d)=>{var f,c,b=d[0],t=d[1],o=d[2],n=0;if(b.some((a=>0!==e[a]))){for(f in t)r.o(t,f)&&(r.m[f]=t[f]);if(o)var i=o(r)}for(a&&a(d);n{"use strict";var e,a,d,f,c,b={},t={};function r(e){var a=t[e];if(void 0!==a)return a.exports;var d=t[e]={id:e,loaded:!1,exports:{}};return b[e].call(d.exports,d,d.exports,r),d.loaded=!0,d.exports}r.m=b,r.c=t,e=[],r.O=(a,d,f,c)=>{if(!d){var b=1/0;for(i=0;i=c)&&Object.keys(r.O).every((e=>r.O[e](d[o])))?d.splice(o--,1):(t=!1,c0&&e[i-1][2]>c;i--)e[i]=e[i-1];e[i]=[d,f,c]},r.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return r.d(a,{a:a}),a},d=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,r.t=function(e,f){if(1&f&&(e=this(e)),8&f)return e;if("object"==typeof e&&e){if(4&f&&e.__esModule)return e;if(16&f&&"function"==typeof e.then)return e}var c=Object.create(null);r.r(c);var b={};a=a||[null,d({}),d([]),d(d)];for(var t=2&f&&e;"object"==typeof t&&!~a.indexOf(t);t=d(t))Object.getOwnPropertyNames(t).forEach((a=>b[a]=()=>e[a]));return b.default=()=>e,r.d(c,b),c},r.d=(e,a)=>{for(var d in a)r.o(a,d)&&!r.o(e,d)&&Object.defineProperty(e,d,{enumerable:!0,get:a[d]})},r.f={},r.e=e=>Promise.all(Object.keys(r.f).reduce(((a,d)=>(r.f[d](e,a),a)),[])),r.u=e=>"assets/js/"+({53:"935f2afb",451:"e415f9f6",463:"e68a1c9e",605:"4252e969",697:"311b36d9",924:"2842b95f",1011:"77df73f1",1032:"5d928751",1033:"ef82f9f8",1060:"0e729158",1229:"52a0bca6",1414:"92551a41",1512:"806f62a0",1607:"f2563ea8",1684:"30bad7fd",1714:"1f819a6a",1723:"e68086c7",1973:"cd8cc8f6",2011:"c0f17dd2",2032:"6a39bdb7",2052:"c83b8faa",2265:"56b79ddf",2300:"45ae3dfd",2461:"8db8515f",2513:"be794740",2515:"0096c9e8",2527:"bcbfd5bd",2570:"f7e73c15",2571:"d0c9c887",2657:"352e0155",2759:"02b9e606",2851:"fac3f613",2984:"4c6b0ea3",2996:"ae95ad8d",3085:"1f391b9e",3222:"2de7f827",3237:"1df93b7f",3552:"2e8b9598",3856:"b54de702",3940:"98e51aae",4051:"323a8b36",4095:"24605d3f",4141:"55e75476",4370:"e092da67",4447:"9bd4ad20",4510:"ea288814",4586:"74126281",4600:"d6cfd461",4604:"34be08f6",4660:"2b1aa4ae",4826:"64101c1c",4994:"593df1f8",5036:"404a71d4",5101:"5ccc0acb",5134:"bc53d220",5371:"1d540fc8",5424:"64f10cae",5430:"82f8e163",5497:"52a462e1",5520:"81a7ed24",5597:"a243e695",5696:"b108acf9",5829:"63323f2d",5839:"fe186c37",5867:"9c5e90dd",5878:"9c3963e5",5988:"b3d231d1",6005:"2b3f5e4d",6210:"656f3db8",6297:"b0207dc0",6490:"51a35976",6614:"a1ee4268",6628:"d93ec163",6680:"8657d6b7",6749:"615db352",6780:"d9ba8899",6863:"52b91c1d",6997:"2ded1a41",7005:"160bf777",7053:"6d1a6fc6",7189:"302c3da2",7298:"d9523fd4",7306:"3d1e1011",7376:"f748dfb1",7414:"393be207",7465:"99b17c27",7525:"74d04fec",7549:"2369f063",7558:"0ec0ba76",7616:"306a8c6c",7628:"afecfb43",7775:"11b44e77",7904:"4f70ae63",7918:"17896441",7964:"81a92311",7966:"85e11584",7977:"a6269ae6",7986:"1aa635cc",8225:"d7dc9408",8367:"d10c9a0a",8376:"cf42168c",8424:"1a5d547c",9010:"af806db3",9201:"8111fb61",9259:"ab4ab49e",9287:"20a999a7",9366:"8687dcee",9371:"89ac38ee",9512:"9a3eef67",9514:"1be78505",9680:"d1b5315b",9699:"81e9ac91",9800:"10a35dc9",9945:"607d38b2"}[e]||e)+"."+{53:"1bc18810",451:"76aa63b1",463:"a0fc699f",605:"9b059c64",697:"4c449b75",924:"849081d7",1011:"66a549ae",1032:"e5b86cce",1033:"bd899c1b",1060:"5de5ac42",1229:"d748758f",1414:"8c50ec70",1512:"2edc5228",1607:"fa6b5001",1684:"fdb76f80",1714:"47059ed6",1723:"3054fb69",1973:"abb29663",2011:"22ea06ec",2032:"eb7e91f0",2052:"5270a3d5",2265:"d4f1c267",2300:"55c467df",2461:"76c430
18",2513:"3d9fa37e",2515:"49d0a202",2527:"4f89a53d",2570:"b3b2513c",2571:"f72175e6",2657:"480b3ee9",2759:"9d100819",2851:"7fdb1be1",2984:"a893b667",2996:"e2d19c21",3085:"50df1a02",3222:"9e842577",3237:"21501263",3552:"40432512",3856:"bbc4c61c",3940:"ad36baba",4051:"7d25fd1a",4095:"0d4164e1",4141:"a4725dfe",4370:"8907921a",4447:"6f1994c0",4510:"ba96924f",4586:"f0fa827c",4600:"8d27caef",4604:"b19ae702",4660:"f9171ba1",4826:"9f684394",4972:"9218459b",4994:"351d9a17",5036:"ce43d136",5101:"6f7eb5a1",5134:"69b1938b",5371:"92b76139",5424:"2cc4f7ee",5430:"8f9802f1",5497:"58ab6440",5520:"be3cd94e",5597:"145dcdc6",5696:"05ffdcef",5829:"3fe4cc4c",5839:"6da54db6",5867:"b47077fa",5878:"6cb1685c",5988:"40acd90d",6005:"38c43856",6210:"5871db88",6297:"8698c977",6490:"70485e8c",6614:"a54c7622",6628:"4baf1b1b",6680:"192566b4",6749:"8bd4a5e5",6780:"3b1633e9",6863:"852fb386",6997:"9af81dc0",7005:"e9823c6a",7053:"f58f790b",7189:"b05a62f9",7298:"1b6a6f0f",7306:"40bbebc6",7376:"c7fdde06",7414:"5ed6e6ec",7465:"f800372e",7525:"d7ccdd67",7549:"92f4c018",7558:"858ef321",7616:"c5173e72",7628:"7710f425",7775:"6235d7bd",7904:"a875d94f",7918:"d8f6f22a",7964:"32de2540",7966:"f41a5ff3",7977:"9bd76b1d",7986:"4f8e1195",8225:"0ea13cc0",8367:"78f9feab",8376:"90a034eb",8424:"bd8579e4",9010:"a5fdaba0",9201:"c6ed6c12",9259:"4410b0a8",9287:"41453705",9366:"ba6fb95e",9371:"cfa46176",9455:"bfee0bcc",9512:"2c2a8de4",9514:"f046b65b",9680:"2f2dfd89",9699:"0503ee6e",9800:"f922fc3e",9945:"48c7fea4"}[e]+".js",r.miniCssF=e=>{},r.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),r.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),f={},c="v-2:",r.l=(e,a,d,b)=>{if(f[e])f[e].push(a);else{var t,o;if(void 0!==d)for(var n=document.getElementsByTagName("script"),i=0;i{t.onerror=t.onload=null,clearTimeout(s);var c=f[e];if(delete f[e],t.parentNode&&t.parentNode.removeChild(t),c&&c.forEach((e=>e(d))),a)return a(d)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:t}),12e4);t.onerror=l.bind(null,t.onerror),t.onload=l.bind(null,t.onload),o&&document.head.appendChild(t)}},r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.p="/en/",r.gca=function(e){return 
e={17896441:"7918",74126281:"4586","935f2afb":"53",e415f9f6:"451",e68a1c9e:"463","4252e969":"605","311b36d9":"697","2842b95f":"924","77df73f1":"1011","5d928751":"1032",ef82f9f8:"1033","0e729158":"1060","52a0bca6":"1229","92551a41":"1414","806f62a0":"1512",f2563ea8:"1607","30bad7fd":"1684","1f819a6a":"1714",e68086c7:"1723",cd8cc8f6:"1973",c0f17dd2:"2011","6a39bdb7":"2032",c83b8faa:"2052","56b79ddf":"2265","45ae3dfd":"2300","8db8515f":"2461",be794740:"2513","0096c9e8":"2515",bcbfd5bd:"2527",f7e73c15:"2570",d0c9c887:"2571","352e0155":"2657","02b9e606":"2759",fac3f613:"2851","4c6b0ea3":"2984",ae95ad8d:"2996","1f391b9e":"3085","2de7f827":"3222","1df93b7f":"3237","2e8b9598":"3552",b54de702:"3856","98e51aae":"3940","323a8b36":"4051","24605d3f":"4095","55e75476":"4141",e092da67:"4370","9bd4ad20":"4447",ea288814:"4510",d6cfd461:"4600","34be08f6":"4604","2b1aa4ae":"4660","64101c1c":"4826","593df1f8":"4994","404a71d4":"5036","5ccc0acb":"5101",bc53d220:"5134","1d540fc8":"5371","64f10cae":"5424","82f8e163":"5430","52a462e1":"5497","81a7ed24":"5520",a243e695:"5597",b108acf9:"5696","63323f2d":"5829",fe186c37:"5839","9c5e90dd":"5867","9c3963e5":"5878",b3d231d1:"5988","2b3f5e4d":"6005","656f3db8":"6210",b0207dc0:"6297","51a35976":"6490",a1ee4268:"6614",d93ec163:"6628","8657d6b7":"6680","615db352":"6749",d9ba8899:"6780","52b91c1d":"6863","2ded1a41":"6997","160bf777":"7005","6d1a6fc6":"7053","302c3da2":"7189",d9523fd4:"7298","3d1e1011":"7306",f748dfb1:"7376","393be207":"7414","99b17c27":"7465","74d04fec":"7525","2369f063":"7549","0ec0ba76":"7558","306a8c6c":"7616",afecfb43:"7628","11b44e77":"7775","4f70ae63":"7904","81a92311":"7964","85e11584":"7966",a6269ae6:"7977","1aa635cc":"7986",d7dc9408:"8225",d10c9a0a:"8367",cf42168c:"8376","1a5d547c":"8424",af806db3:"9010","8111fb61":"9201",ab4ab49e:"9259","20a999a7":"9287","8687dcee":"9366","89ac38ee":"9371","9a3eef67":"9512","1be78505":"9514",d1b5315b:"9680","81e9ac91":"9699","10a35dc9":"9800","607d38b2":"9945"}[e]||e,r.p+r.u(e)},(()=>{var e={1303:0,532:0};r.f.j=(a,d)=>{var f=r.o(e,a)?e[a]:void 0;if(0!==f)if(f)d.push(f[2]);else if(/^(1303|532)$/.test(a))e[a]=0;else{var c=new Promise(((d,c)=>f=e[a]=[d,c]));d.push(f[2]=c);var b=r.p+r.u(a),t=new Error;r.l(b,(d=>{if(r.o(e,a)&&(0!==(f=e[a])&&(e[a]=void 0),f)){var c=d&&("load"===d.type?"missing":d.type),b=d&&d.target&&d.target.src;t.message="Loading chunk "+a+" failed.\n("+c+": "+b+")",t.name="ChunkLoadError",t.type=c,t.request=b,f[1](t)}}),"chunk-"+a,a)}},r.O.j=a=>0===e[a];var a=(a,d)=>{var f,c,b=d[0],t=d[1],o=d[2],n=0;if(b.some((a=>0!==e[a]))){for(f in t)r.o(t,f)&&(r.m[f]=t[f]);if(o)var i=o(r)}for(a&&a(d);n - +
    -

    Community

    MLOps for ALL Release News

    New posts and updates can be found in Announcements.

    Question

    Questions about the project content can be asked through Q&A.

    Suggestion

    Suggestions can be submitted through Ideas.

    - +


    + \ No newline at end of file diff --git a/en/community/contributors/index.html b/en/community/contributors/index.html index a95b01ce..b9098084 100644 --- a/en/community/contributors/index.html +++ b/en/community/contributors/index.html @@ -7,13 +7,13 @@ - +
    -

    Contributors

    Main Authors

    Jongseob Jeon's avatar

    Jongseob Jeon

    Project Leader
    I work as a machine learning engineer at MakinaRocks. Just as many people got an easy introduction to deep learning through 모두의 딥러닝 (Deep Learning for Everyone), I hope many people get an easy introduction to MLOps through MLOps for ALL.
    Jayeon Kim's avatar

    Jayeon Kim

    Project Member
    I am very interested in automating inefficient tasks.
    Youngchel Jang's avatar

    Youngchel Jang

    Project Member
    I work as an MLOps Engineer at MakinaRocks. I try to keep my thinking simple.

    Contributors

    Thank you for contributing to our tutorials!

    Jongsun Shinn's avatar

    Jongsun Shinn

    I work as an ML Engineer at MakinaRocks.
    Sangwoo Shim's avatar

    Sangwoo Shim

    I work as the CTO at MakinaRocks. MakinaRocks is a startup that develops machine-learning-based industrial AI solutions. What we do is solve problems on the industrial floor so that people can focus on their core work.
    Seunghyun Ko's avatar

    Seunghyun Ko

    I work as an MLOps Engineer at 3i. I am very interested in kubeflow.
    SeungTae Kim's avatar

    SeungTae Kim

    I am working as an Applied AI Engineer intern at a startup called Genesis Lab. I believe the machine learning ecosystem will bring great changes across our industries, and I am moving forward one step at a time.
    Youngdon Tae's avatar

    Youngdon Tae

    I work as an ML engineer at Backpackr (백패커). I am interested in natural language processing, recommender systems, and MLOps.
    - +

    + \ No newline at end of file diff --git a/en/community/how-to-contribute/index.html b/en/community/how-to-contribute/index.html index d9ab18a8..7729e2c5 100644 --- a/en/community/how-to-contribute/index.html +++ b/en/community/how-to-contribute/index.html @@ -7,15 +7,15 @@ - +

    How to Contribute

    How to Start

    Preparing the Git Repo

    1. Go to the MLOps for ALL GitHub Repository.

    2. Fork it to your personal repository.

    3. git clone the forked repository to your working environment.

    Environment Setup

    1. MLOps for ALL uses Hugo and Node.
      Use the following commands to check that the required packages are installed.
    • node & npm

      npm --version
    • hugo

      hugo version
    1. Install the required node modules.

      npm install
    2. The project applies several markdown lint rules to keep the posts consistent.
      After editing or adding content, run the following command to make sure the lint checks pass before committing.

      npm test
    3. After the lint check passes, run ci.

      npm ci
    4. Run the site locally and check that the edited post renders correctly.

      npm run start

    How to Contribute

    1. When writing a new post

    For a new post, set the weight according to its chapter and the post's position.

    • Introduction: 1xx
    • Setup: 2xx
    • Kubeflow: 3xx
    • API Deployment: 4xx
    • Help: 10xx

    2. When editing an existing post

    When editing an existing post, add your name to the contributors list.

    contributors: ["John Doe", "Adam Smith"]

    3. When contributing to the project for the first time

    If you are contributing to the project for the first time, create a folder named after yourself under content/kor/contributors and write a file called _index.md inside it.

    For example, if your English name is minsoo kim, name the folder minsoo-kim and write the following content in the _index.md file inside that folder. The folder name is lowercase joined with hyphens (-), and the title is written in CamelCase with spaces.

    ---
    title: "John Doe"
    draft: false
    ---
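    If it helps, the folder and file can also be created with a small script. The sketch below is only a convenience example, assumed to run from the repository root, and the contributor name in it is hypothetical.

    from pathlib import Path

    # Hypothetical contributor name, used only for illustration.
    name = "minsoo kim"

    # Folder name: lowercase words joined with hyphens, e.g. "minsoo-kim".
    folder = Path("content/kor/contributors") / name.lower().replace(" ", "-")
    folder.mkdir(parents=True, exist_ok=True)

    # title: CamelCase with spaces, e.g. "Minsoo Kim".
    front_matter = f'---\ntitle: "{name.title()}"\ndraft: false\n---\n'
    (folder / "_index.md").write_text(front_matter)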

    After Pull Request

    When you create a Pull Request, a review request is automatically sent to the MLOps for ALL maintainers. We will review it and leave a comment within a week at most.


    + \ No newline at end of file diff --git a/en/docs/1.0/api-deployment/seldon-children/index.html b/en/docs/1.0/api-deployment/seldon-children/index.html index 8397a450..ff3d34ad 100644 --- a/en/docs/1.0/api-deployment/seldon-children/index.html +++ b/en/docs/1.0/api-deployment/seldon-children/index.html @@ -7,14 +7,14 @@ - +
    Version: 1.0

    6. Multi Models

    Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models.

    First, we will create a pipeline that produces two models: we add a StandardScaler in front of the SVC model used earlier and save both models.

    from functools import partial

    import kfp
    from kfp.components import InputPath, OutputPath, create_component_from_func


    @partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
    )
    def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_scaler_from_csv(
    data_path: InputPath("csv"),
    scaled_data_path: OutputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    ):
    import dill
    import pandas as pd
    from sklearn.preprocessing import StandardScaler

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    data = pd.read_csv(data_path)

    scaler = StandardScaler()
    scaled_data = scaler.fit_transform(data)
    scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)

    scaled_data.to_csv(scaled_data_path, index=False)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(scaler, file_writer)

    input_example = data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(data, scaler.transform(data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_svc_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    kernel: str,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
    )
    def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)
    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)


    from kfp.dsl import pipeline


    @pipeline(name="multi_model_pipeline")
    def multi_model_pipeline(kernel: str = "rbf"):
    iris_data = load_iris_data()
    scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])
    _ = upload_sklearn_model_to_mlflow(
    model_name="scaler",
    model=scaled_data.outputs["model"],
    input_example=scaled_data.outputs["input_example"],
    signature=scaled_data.outputs["signature"],
    conda_env=scaled_data.outputs["conda_env"],
    )
    model = train_svc_from_csv(
    train_data=scaled_data.outputs["scaled_data"],
    train_target=iris_data.outputs["target"],
    kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
    model_name="svc",
    model=model.outputs["model"],
    input_example=model.outputs["input_example"],
    signature=model.outputs["signature"],
    conda_env=model.outputs["conda_env"],
    )


    if __name__ == "__main__":
    kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")
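    You can upload the compiled multi_model_pipeline.yaml through the Kubeflow Pipelines UI, or programmatically with the kfp SDK. The sketch below is only an illustration; the host value is a placeholder that depends on how your cluster exposes the pipeline endpoint, and authentication may also be required.

    import kfp

    # Placeholder host; adjust to your Kubeflow Pipelines endpoint and auth setup.
    client = kfp.Client(host="http://localhost:8080/pipeline")
    client.upload_pipeline(
        pipeline_package_path="multi_model_pipeline.yaml",
        pipeline_name="multi_model_pipeline",
    )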

    If you upload the pipeline, it will look like this.

    children-kubeflow.png

    When you check the MLflow dashboard, two models will be generated, as shown below.

    children-mlflow.png
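    If you prefer to look up each model's run_id programmatically rather than from the dashboard, a minimal sketch along these lines can help (it assumes the same MLflow tracking address used by the pipeline components above).

    from mlflow.tracking.client import MlflowClient

    # Same tracking server address that upload_sklearn_model_to_mlflow used.
    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    # The upload component logged each model ("scaler", "svc") as an artifact of a run
    # in experiment "0"; print each run_id together with its artifact paths.
    for run in client.search_runs(experiment_ids=["0"]):
        paths = [artifact.path for artifact in client.list_artifacts(run.info.run_id)]
        print(run.info.run_id, paths)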

    After checking the run_id of each one, define the SeldonDeployment spec as follows.

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"

    Since two models are created, an initContainer and a container must be defined for each model. These fields take arrays as input, and the order of the entries does not matter; the order in which the models are executed is defined in the graph.

    graph:
      name: scaler
      type: MODEL
      parameters:
      - name: model_uri
        type: STRING
        value: "/mnt/models"
      - name: predict_method
        type: STRING
        value: "transform"
      children:
      - name: svc
        type: MODEL
        parameters:
        - name: model_uri
          type: STRING
          value: "/mnt/models"

    The graph works by running the initial input through the predefined predict_method and then passing the result to the model defined as children. In this case, the data flows from scaler -> svc.
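    Conceptually, this is the same composition you would write by hand. The following sketch is only an illustration of the data flow; the scaler and svc objects stand in for the two deployed models.

    import pandas as pd

    def serve_request(payload: pd.DataFrame, scaler, svc):
        # scaler node: its predict_method is set to "transform" in the graph.
        scaled = scaler.transform(payload)
        # child node "svc": receives the scaler output and returns the prediction.
        return svc.predict(scaled)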

    Now let's create the above specifications in a yaml file.

    cat <<EOF > multi-model.yaml
    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    EOF

    Create an API through the following command.

    kubectl apply -f multi-model.yaml

    If it runs correctly, the following output will be printed.

    seldondeployment.machinelearning.seldon.io/multi-model-example created

    Check to see if it has been generated normally.

    kubectl get po -n kubeflow-user-example-com | grep multi-model-example

    If it was created successfully, you will see a pod similar to the following.

    multi-model-example-model-0-scaler-svc-9955fb795-n9ffw   4/4     Running     0          2m30s
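    Once the pod is Running, the composed graph can be called like any other SeldonDeployment endpoint. The sketch below is only an assumption-laden example: the node IP, NodePort, and any authentication your ingress requires depend on your cluster, and the URL simply follows the ingress rule used elsewhere in this guide.

    import requests

    # Placeholders: replace with your cluster's node IP / NodePort (and auth if needed).
    NODE_IP = "192.168.0.19"
    NODE_PORT = 30486
    url = (
        f"http://{NODE_IP}:{NODE_PORT}"
        "/seldon/kubeflow-user-example-com/multi-model-example/api/v1.0/predictions"
    )

    payload = {"data": {"ndarray": [[5.1, 3.5, 1.4, 0.2]]}}
    response = requests.post(url, json=payload)
    print(response.json())  # the svc prediction computed on the scaler's output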
    + \ No newline at end of file diff --git a/en/docs/1.0/api-deployment/seldon-fields/index.html b/en/docs/1.0/api-deployment/seldon-fields/index.html index cb2ae443..0306a96e 100644 --- a/en/docs/1.0/api-deployment/seldon-fields/index.html +++ b/en/docs/1.0/api-deployment/seldon-fields/index.html @@ -7,13 +7,13 @@ - +
    -
    Version: 1.0

    4. Seldon Fields

    Summary of how Seldon Core creates an API server:

    1. initContainer downloads the required model from the model repository.
    2. The downloaded model is passed to the container.
    3. The container runs an API server enclosing the model.
    4. The API can be requested at the generated API server address to receive the inference values from the model.

    The yaml file defining the custom resource, SeldonDeployment, which is most commonly used when using Seldon Core is as follows:

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: seldon-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: model-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location

    containers:
    - name: model
    image: seldonio/sklearnserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: model
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    children: []

    The name and predictors fields of SeldonDeployment are required. name is mainly used to distinguish pods in Kubernetes and has little other effect. predictors must be an array with a single element in which name, componentSpecs, and graph are defined; here too, name is mainly used to distinguish pods in Kubernetes and has little other effect.

    Now let's take a look at the fields that need to be defined in componentSpecs and graph.

    componentSpecs

    componentSpecs must be an array with a single element containing the spec key. The spec must define the volumes, initContainers, and containers fields.

    volumes

    volumes:
    - name: model-provision-location
      emptyDir: {}

    volumes defines the space used to store the models downloaded by the initContainer; it is given as an array of entries with name and emptyDir. These values are only used while downloading and moving the models, so they rarely need to be modified.

    - name: model-initializer
      image: gcr.io/kfserving/storage-initializer:v0.4.0
      args:
      - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
      - "/mnt/models"
      volumeMounts:
      - mountPath: /mnt/models
        name: model-provision-location

    The args field contains the system arguments necessary to download the model from the model repository and move it to the specified model path. It provides the required parameters for the initContainer to perform the downloading and storage operations.

    The initContainer is responsible for downloading the model that the API will use, so its fields specify the information needed to download the model from the model registry.

    initContainers is an array with one entry per model, so each model must be specified separately.

    name

    name is the name of the pod in Kubernetes, and it is recommended to use {model_name}-initializer for debugging.

    image

    image is the name of the image used to download the model. There are two recommended images:

    • gcr.io/kfserving/storage-initializer:v0.4.0
    • seldonio/rclone-storage-initializer:1.13.0-dev

    For more detailed information, please refer to the Seldon Core documentation.

    In MLOps for ALL, we use the kfserving storage-initializer for downloading and storing models.

    args

    args:
    - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
    - "/mnt/models"

    When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image runs, it takes its arguments as an array. The first value is the address of the model to download, and the second value is the path where the downloaded model will be stored (Seldon Core usually stores it in /mnt/models).

    volumeMounts

    volumeMounts:
    - mountPath: /mnt/models
      name: model-provision-location

    volumeMounts is the field that mounts the volume declared in volumes so that /mnt/models can be shared. For more information, refer to Kubernetes Volumes.

    container

    containers:
    - name: model
      image: seldonio/sklearnserver:1.8.0-dev
      volumeMounts:
      - mountPath: /mnt/models
        name: model-provision-location
        readOnly: true
      securityContext:
        privileged: true
        runAsUser: 0
        runAsGroup: 0

    containers defines the fields that determine the configuration used when the model is served as an API.

    name

    The name field refers to the name of the pod in Kubernetes. It should be the name of the model being used.

    image

    The image field represents the image used to convert the model into an API. The image should have all the necessary packages installed when the model is loaded.

    Seldon Core provides official images for different types of models, including:

    • seldonio/sklearnserver
    • seldonio/mlflowserver
    • seldonio/xgboostserver
    • seldonio/tfserving

    You can choose the appropriate image based on the type of model you are using.

    volumeMounts

    volumeMounts:
    - mountPath: /mnt/models
      name: model-provision-location
      readOnly: true

    This field specifies the path where the data downloaded by the initContainer is located. To prevent the model from being modified, readOnly: true is also set.

    securityContext

    securityContext:
      privileged: true
      runAsUser: 0
      runAsGroup: 0

    When installing the necessary packages, the pod may fail to install them due to insufficient permissions. To address this, root permission is granted (although this can cause security issues in an actual service).

    graph

    graph:
      name: model
      type: MODEL
      parameters:
      - name: model_uri
        type: STRING
        value: "/mnt/models"
      children: []

    This is a field that defines the order in which the model operates.

    name

    The name field refers to the name of the model graph. It should match the name defined in the container.

    type

    The type field can have four different values:

    1. TRANSFORMER
    2. MODEL
    3. OUTPUT_TRANSFORMER
    4. ROUTER

    For detailed explanations of each type, you can refer to the Seldon Core Complex Graphs Metadata Example.

    parameters

    The parameters field contains the values passed to the server class's __init__. For the sklearnserver, you can find the accepted values in the following file.

    class SKLearnServer(SeldonComponent):
        def __init__(self, model_uri: str = None, method: str = "predict_proba"):

    If you look at the code, you can define model_uri and method.
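    In other words, each entry under parameters is forwarded to the server class as a keyword argument. The snippet below is only an illustration with a stand-in class, not the actual Seldon source.

    # Stand-in for the SKLearnServer signature shown above (illustration only).
    class SKLearnServer:
        def __init__(self, model_uri: str = None, method: str = "predict_proba"):
            self.model_uri = model_uri
            self.method = method

    # graph.parameters entries become keyword arguments of __init__:
    params = {"model_uri": "/mnt/models", "method": "predict_proba"}
    server = SKLearnServer(**params)
    print(server.model_uri, server.method)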

    children

    The children field is used when creating the sequence diagram. More details about this field will be explained on the following page.

    - +

    + \ No newline at end of file diff --git a/en/docs/1.0/api-deployment/seldon-iris/index.html b/en/docs/1.0/api-deployment/seldon-iris/index.html index bd073c34..4042ac34 100644 --- a/en/docs/1.0/api-deployment/seldon-iris/index.html +++ b/en/docs/1.0/api-deployment/seldon-iris/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ Because this iris model is trained through the sklearn framework, we use SKLEARN_SERVER.

    cat <<EOF > iris-sdep.yaml
    apiVersion: machinelearning.seldon.io/v1alpha2
    kind: SeldonDeployment
    metadata:
      name: sklearn
      namespace: seldon-deploy
    spec:
      name: iris
      predictors:
      - graph:
          children: []
          implementation: SKLEARN_SERVER
          modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris
          name: classifier
        name: default
        replicas: 1
    EOF

    Deploy the yaml file.

    kubectl apply -f iris-sdep.yaml

    Check if the deployment was successful through the following command.

    kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy

    If everything is running correctly, output similar to the following will be printed.

    NAME                                            READY   STATUS    RESTARTS   AGE
    sklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m

    Ingress URL

    Now, send an inference request to the deployed model to get the inference result. The API created by the SeldonDeployment follows this rule: http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/
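    As a quick reference, the rule can be composed from its parts as follows; the values here are placeholders that match the examples later on this page.

    # Placeholder values matching the examples below.
    node_ip = "192.168.0.19"            # NODE_IP
    node_port = 30486                   # NODE_PORT
    namespace = "seldon-deploy"         # metadata.namespace of the SeldonDeployment
    deployment_name = "sklearn"         # metadata.name of the SeldonDeployment

    for method_name in ("doc", "predictions"):
        print(f"http://{node_ip}:{node_port}"
              f"/seldon/{namespace}/{deployment_name}/api/v1.0/{method_name}/")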

    NODE_IP / NODE_PORT

    Since Seldon Core was installed with Ambassador as the Ingress Controller, all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway.

    Therefore, first set the url of the Ambassador Ingress Gateway as an environment variable.

    export NODE_IP=$(kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }')
    export NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")

    Check the set url.

    echo "NODE_IP"=$NODE_IP
    echo "NODE_PORT"=$NODE_PORT

    The output should look similar to the following; if the cluster was set up in the cloud, you can check that the internal IP address is set.

    NODE_IP=192.168.0.19
    NODE_PORT=30486

    namespace / seldon-deployment-name

    These refer to the namespace in which the SeldonDeployment is deployed and its name; they correspond to the values defined in metadata when writing the spec.

    metadata:
    name: sklearn
    namespace: seldon-deploy

    In the example above, namespace is seldon-deploy, seldon-deployment-name is sklearn.

    method-name

    In SeldonDeployment, the commonly used method-name has two options:

    1. doc
    2. predictions

    The detailed usage of each method is explained below.

    Using Swagger

    First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon.

    1. Accessing Swagger

    According to the provided ingress URL rules, you can access the Swagger documentation using the following URL: http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/

    iris-swagger1.png

    2. Selecting Swagger Predictions

    In the Swagger UI, select the /seldon/seldon-deploy/sklearn/api/v1.0/predictions endpoint.

    iris-swagger2.png

    3. Choosing Try it out

    iris-swagger3.png

    4. Inputting data in the Request body

    iris-swagger4.png

    Enter the following data into the Request body.

    {
      "data": {
        "ndarray": [[1.0, 2.0, 5.0, 6.0]]
      }
    }

    5. Check the inference results

    You can click the Execute button to obtain the inference result.

    iris-swagger5.png

    If everything is executed successfully, you will obtain the following inference result.

    {
      "data": {
        "names": [
          "t:0",
          "t:1",
          "t:2"
        ],
        "ndarray": [
          [
            9.912315378486697e-7,
            0.0007015931307746079,
            0.9992974156376876
          ]
        ]
      },
      "meta": {
        "requestPath": {
          "classifier": "seldonio/sklearnserver:1.11.2"
        }
      }
    }

    Using CLI

    Also, you can use HTTP client CLI tools such as curl to make API requests. For example, request /predictions as follows.

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'

If the request succeeds, a response like the following is returned.

    {"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}
    Version: 1.0

    5. Model from MLflow

    Model from MLflow

    On this page, we will learn how to create an API using a model saved in the MLflow Component.

    Secret

The initContainer needs credentials to access MinIO and download the model. The credentials for accessing MinIO are as follows.

apiVersion: v1
type: Opaque
kind: Secret
metadata:
  name: seldon-init-container-secret
  namespace: kubeflow-user-example-com
data:
  AWS_ACCESS_KEY_ID: bWluaW8K=
  AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
  AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp
  USE_SSL: ZmFsc2U=

For example, the plain-text value of AWS_ACCESS_KEY_ID is minio. However, every value under data in a Secret must be base64-encoded, so the value that is actually entered is the encoded form. The encoding can be done with the following command.

    echo -n minio | base64

Then the following value will be output.

    bWluaW8=

If you encode all of the values in the same way, you get the encoded strings used in the Secret manifest below.
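If you want to compute every encoded value at once instead of running echo for each one, a minimal Python sketch such as the following will do. The plain-text values are the ones assumed by this example; adjust them for your environment.

import base64

# Plain-text values used in this example Secret; adjust for your environment.
values = {
    "AWS_ACCESS_KEY_ID": "minio",
    "AWS_SECRET_ACCESS_KEY": "minio123",
    "AWS_ENDPOINT_URL": "http://minio-service.kubeflow.svc:9000",
    "USE_SSL": "false",
}

for key, value in values.items():
    print(key, base64.b64encode(value.encode()).decode())
# AWS_ACCESS_KEY_ID bWluaW8=
# AWS_SECRET_ACCESS_KEY bWluaW8xMjM=
# AWS_ENDPOINT_URL aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
# USE_SSL ZmFsc2U=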

    You can generate a yaml file through the following command to create the secret.

cat <<EOF > seldon-init-container-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: seldon-init-container-secret
  namespace: kubeflow-user-example-com
type: Opaque
data:
  AWS_ACCESS_KEY_ID: bWluaW8=
  AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
  AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
  USE_SSL: ZmFsc2U=
EOF

    Create the secret through the following command.

    kubectl apply -f seldon-init-container-secret.yaml

    If performed normally, it will be output as follows.

    secret/seldon-init-container-secret created

    Seldon Core yaml

Now let's write the yaml file for the SeldonDeployment.

apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: seldon-example
  namespace: kubeflow-user-example-com
spec:
  name: model
  predictors:
    - name: model

      componentSpecs:
        - spec:
            volumes:
              - name: model-provision-location
                emptyDir: {}

            initContainers:
              - name: model-initializer
                image: gcr.io/kfserving/storage-initializer:v0.4.0
                args:
                  - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
                  - "/mnt/models"
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                envFrom:
                  - secretRef:
                      name: seldon-init-container-secret

            containers:
              - name: model
                image: ghcr.io/mlops-for-all/mlflowserver
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                    readOnly: true
                securityContext:
                  privileged: true
                  runAsUser: 0
                  runAsGroup: 0

      graph:
        name: model
        type: MODEL
        parameters:
          - name: model_uri
            type: STRING
            value: "/mnt/models"
        children: []

There are two major changes compared to the SeldonDeployment spec created previously:

    1. The envFrom field is added to the initContainer.
    2. The address in the args has been changed to s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc.

    args

    Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?

    To find the path, go back to MLflow and click on the run, then click on the model, as shown below:

    seldon-mlflow-0.png

    You can use the path obtained from there.
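If you would rather look the path up programmatically than through the UI, the MLflow client exposes the run's artifact URI. The sketch below assumes the MLflow tracking server address used elsewhere in this guide and the run ID taken from the example path above; substitute your own values.

import mlflow

# Assumed tracking server address (see the MetalLB appendix); adjust to your setup.
mlflow.set_tracking_uri("http://192.168.35.102:5000")

# Run ID taken from the example s3:// path above.
run = mlflow.get_run("74ba8e33994144f599e50b3be176cdb0")
print(run.info.artifact_uri)
# e.g. s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts
# The model itself sits under the "svc" subdirectory of this URI.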

    envFrom

    This process involves providing the environment variables required to access MinIO and download the model. We will use the seldon-init-container-secret created earlier.

    API Creation

    First, let's generate the YAML file based on the specification defined above.

cat <<EOF > seldon-mlflow.yaml
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: seldon-example
  namespace: kubeflow-user-example-com
spec:
  name: model
  predictors:
    - name: model

      componentSpecs:
        - spec:
            volumes:
              - name: model-provision-location
                emptyDir: {}

            initContainers:
              - name: model-initializer
                image: gcr.io/kfserving/storage-initializer:v0.4.0
                args:
                  - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
                  - "/mnt/models"
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                envFrom:
                  - secretRef:
                      name: seldon-init-container-secret

            containers:
              - name: model
                image: ghcr.io/mlops-for-all/mlflowserver
                volumeMounts:
                  - mountPath: /mnt/models
                    name: model-provision-location
                    readOnly: true
                securityContext:
                  privileged: true
                  runAsUser: 0
                  runAsGroup: 0

      graph:
        name: model
        type: MODEL
        parameters:
          - name: model_uri
            type: STRING
            value: "/mnt/models"
          - name: xtype
            type: STRING
            value: "dataframe"
        children: []
EOF

Create the SeldonDeployment.

    kubectl apply -f seldon-mlflow.yaml

    If it is performed normally, it will be outputted as follows.

    seldondeployment.machinelearning.seldon.io/seldon-example created

    Now we wait until the pod is up and running properly.

    kubectl get po -n kubeflow-user-example-com | grep seldon

    If it is outputted similarly to the following, the API has been created normally.

    seldon-example-model-0-model-5c949bd894-c5f28      3/3     Running     0          69s

You can verify the created API by sending the following request from the CLI. The URL follows the ingress rule described earlier, using the namespace kubeflow-user-example-com and the deployment name seldon-example.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/seldon-example/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{
          "data": {
            "ndarray": [
              [
                143.0,
                0.0,
                30.0,
                30.0
              ]
            ],
            "names": [
              "sepal length (cm)",
              "sepal width (cm)",
              "petal length (cm)",
              "petal width (cm)"
            ]
          }
        }'

    If executed normally, you can get the following results.

    {"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}
    Version: 1.0

    3. Seldon Monitoring

    Grafana & Prometheus

    Now, let's perform repeated API requests with the SeldonDeployment we created on the previous page and check if the dashboard changes.

    Dashboard

Port-forward the Grafana dashboard created earlier.

    kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

    Request API

Send repeated requests to the previously created SeldonDeployment.

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'

Then, when you check the Grafana dashboard, you can observe that the Global Request Rate momentarily rises from 0 ops.

    repeat-raise.png

    This confirms that Prometheus and Grafana have been successfully installed and configured.

    Version: 1.0

    1. What is API Deployment?

    What is API Deployment?

After training a machine learning model, how should it be used? During training you aim for a model with the best possible performance, but once the model is trained you want to get inference results from it quickly and easily.

When you want to check the inference results of the model, you can load the trained model and run inference in a Jupyter notebook or a Python script. However, this approach becomes inefficient as the model grows larger, and the model can only be used in the environment where it exists, so it cannot be shared by many people.

Therefore, when machine learning is used in actual services, the trained model is exposed through an API. The model is loaded only once, in the environment where the API server is running; clients can easily get inference results via a DNS name, and the API can also be integrated with other services.

However, a lot of ancillary work is needed to turn a model into an API. To make this easier, machine learning frameworks such as TensorFlow have developed inference engines.
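To get a feel for that ancillary work, the sketch below wraps a trained scikit-learn model in a bare-bones Flask API by hand. It is only an illustration (the model path is hypothetical) and not the approach used later in this guide; it also ignores scaling, monitoring, and versioning, which is exactly what inference engines and serving frameworks take care of.

import joblib
from flask import Flask, jsonify, request

app = Flask(__name__)
model = joblib.load("model.joblib")  # hypothetical path to a trained scikit-learn model


@app.route("/predictions", methods=["POST"])
def predictions():
    body = request.get_json()
    rows = body["data"]["ndarray"]  # same payload shape used by Seldon Core
    preds = model.predict(rows).tolist()
    return jsonify({"data": {"ndarray": preds}})


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=9000)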

    Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response.

    Some well-known open-source inference engines include:

    While not officially supported in open-source, there are also inference engines developed for popular frameworks like sklearn and XGBoost.

    Deploying and serving the model's inference results through an API is called API deployment.

    Serving Framework

    I introduced the fact that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. Additionally, we may need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and updating the version when an improved model is available. There are many considerations when operating an inference engine, and it goes beyond just a few tasks.

    To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment.

    Some popular serving frameworks include:

    In MLOps for ALL, we use Seldon Core to demonstrate the process of API deployment.

When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local network and provides the computer's MAC address to the client.

The following metallb_config.yaml file configures MetalLB to manage the IP range 192.168.35.100 ~ 192.168.35.110 and to operate in Layer 2 mode.

    In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node.

    metallb_config.yaml

apiVersion: v1
kind: ConfigMap
metadata:
  namespace: metallb-system
  name: config
data:
  config: |
    address-pools:
    - name: default
      protocol: layer2
      addresses:
      - 192.168.35.100-192.168.35.110 # IP address range

    Apply the above settings.

    kubectl apply -f metallb_config.yaml 

    If deployed normally, it will output as follows.

    configmap/config created

    Using MetalLB

    Kubeflow Dashboard

First, before using MetalLB's load-balancing feature, check the current status of the istio-ingressgateway service in the istio-system namespace, which serves the Kubeflow Dashboard; we will then change its type to LoadBalancer.

    kubectl get svc/istio-ingressgateway -n istio-system

    The type of this service is ClusterIP and you can see that the External-IP value is none.

    NAME                   TYPE        CLUSTER-IP    EXTERNAL-IP   PORT(S)                                        AGE
    istio-ingressgateway ClusterIP 10.103.72.5 <none> 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m

    Change the type to LoadBalancer and if you want to input a desired IP address, add the loadBalancerIP item.
    If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above.

kubectl edit svc/istio-ingressgateway -n istio-system
spec:
  clusterIP: 10.103.72.5
  clusterIPs:
  - 10.103.72.5
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: status-port
    port: 15021
    protocol: TCP
    targetPort: 15021
  - name: http2
    port: 80
    protocol: TCP
    targetPort: 8080
  - name: https
    port: 443
    protocol: TCP
    targetPort: 8443
  - name: tcp
    port: 31400
    protocol: TCP
    targetPort: 31400
  - name: tls
    port: 15443
    protocol: TCP
    targetPort: 15443
  selector:
    app: istio-ingressgateway
    istio: ingressgateway
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.100 # Add IP
status:
  loadBalancer: {}

    If you check again, you will see that the External-IP value is 192.168.35.100.

    kubectl get svc/istio-ingressgateway -n istio-system
    NAME                   TYPE           CLUSTER-IP    EXTERNAL-IP      PORT(S)                                                                      AGE
    istio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m

    Open a web browser and connect to http://192.168.35.100 to verify the following screen is output.

    login-after-istio-ingressgateway-setting.png

    minio Dashboard

First, check the current status of the minio-service service in the kubeflow namespace, which serves the MinIO Dashboard, before changing its type to LoadBalancer so that MetalLB can load-balance it.

    kubectl get svc/minio-service -n kubeflow

    The type of this service is ClusterIP and you can confirm that the External-IP value is none.

    NAME            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
    minio-service ClusterIP 10.109.209.87 <none> 9000/TCP 5h14m

Change the type to LoadBalancer and, if you want to enter an IP address, add the loadBalancerIP item. If you do not add it, the IP address will be assigned sequentially from the IP address pool set above.

kubectl edit svc/minio-service -n kubeflow
apiVersion: v1
kind: Service
metadata:
  annotations:
    kubectl.kubernetes.io/last-applied-configuration: |
      {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>
  creationTimestamp: "2022-01-05T08:44:23Z"
  labels:
    application-crd-id: kubeflow-pipelines
  name: minio-service
  namespace: kubeflow
  resourceVersion: "21120"
  uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48
spec:
  clusterIP: 10.109.209.87
  clusterIPs:
  - 10.109.209.87
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: http
    port: 9000
    protocol: TCP
    targetPort: 9000
  selector:
    app: minio
    application-crd-id: kubeflow-pipelines
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.101 # Add IP
status:
  loadBalancer: {}

    If we check again, we can see that the External-IP value is 192.168.35.101.

    kubectl get svc/minio-service -n kubeflow
    NAME            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)          AGE
    minio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m

    Open a web browser and connect to http://192.168.35.101:9000 to confirm the following screen is printed.

    login-after-minio-setting.png

    mlflow Dashboard

First, check the current status of the mlflow-server-service service in the mlflow-system namespace, which serves the MLflow Dashboard, before changing its type to LoadBalancer so that MetalLB can load-balance it.

    kubectl get svc/mlflow-server-service -n mlflow-system

    The type of this service is ClusterIP and you can confirm that the External-IP value is none.

    NAME                    TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
    mlflow-server-service ClusterIP 10.111.173.209 <none> 5000/TCP 4m50s

    Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.
    If you do not add it, the IP address will be assigned sequentially from the IP address pool set above.

kubectl edit svc/mlflow-server-service -n mlflow-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: mlflow-server
    meta.helm.sh/release-namespace: mlflow-system
  creationTimestamp: "2022-01-07T04:00:19Z"
  labels:
    app.kubernetes.io/managed-by: Helm
  name: mlflow-server-service
  namespace: mlflow-system
  resourceVersion: "276246"
  uid: e5d39fb7-ad98-47e7-b512-f9c673055356
spec:
  clusterIP: 10.111.173.209
  clusterIPs:
  - 10.111.173.209
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - port: 5000
    protocol: TCP
    targetPort: 5000
  selector:
    app.kubernetes.io/name: mlflow-server
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.102 # Add IP
status:
  loadBalancer: {}

    If we check again, we can see that the External-IP value is 192.168.35.102.

    kubectl get svc/mlflow-server-service -n mlflow-system
    NAME                    TYPE           CLUSTER-IP       EXTERNAL-IP      PORT(S)          AGE
    mlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s

    Open the web browser and connect to http://192.168.35.102:5000 to confirm the following screen is displayed.

    login-after-mlflow-setting.png

    Grafana Dashboard

First, check the current status of the seldon-core-analytics-grafana service in the seldon-system namespace, which serves the Grafana Dashboard, before changing its type to LoadBalancer so that MetalLB can load-balance it.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system

    The type of the corresponding service is ClusterIP, and you can see that the External-IP value is none.

    NAME                            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)   AGE
    seldon-core-analytics-grafana ClusterIP 10.109.20.161 <none> 80/TCP 94s

    Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.
If not, an IP address will be assigned sequentially from the IP address pool set above.

kubectl edit svc/seldon-core-analytics-grafana -n seldon-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: seldon-core-analytics
    meta.helm.sh/release-namespace: seldon-system
  creationTimestamp: "2022-01-07T04:16:47Z"
  labels:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: grafana
    app.kubernetes.io/version: 7.0.3
    helm.sh/chart: grafana-5.1.4
  name: seldon-core-analytics-grafana
  namespace: seldon-system
  resourceVersion: "280605"
  uid: 75073b78-92ec-472c-b0d5-240038ea8fa5
spec:
  clusterIP: 10.109.20.161
  clusterIPs:
  - 10.109.20.161
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: service
    port: 80
    protocol: TCP
    targetPort: 3000
  selector:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/name: grafana
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.103 # Add IP
status:
  loadBalancer: {}

    If you check again, you can see that the External-IP value is 192.168.35.103.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system
    NAME                            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)        AGE
    seldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s

    Open the Web Browser and connect to http://192.168.35.103:80 to confirm that the following screen is displayed.

    login-after-grafana-setting.png

    Version: 1.0

    1. Install Python virtual environment

    Python virtual environment

    When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects.

    To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv.

    Among these, MLOps for ALL covers the installation of pyenv and pyenv-virtualenv.
    pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments.

    Installing pyenv

    Prerequisites

    Prerequisites vary depending on the operating system. Please refer to the following page and install the required packages accordingly.

    Installation - macOS

1. Install pyenv, pyenv-virtualenv

brew update
brew install pyenv
brew install pyenv-virtualenv

2. Set pyenv

On macOS, the default shell has been zsh since Catalina, so we assume zsh here and configure pyenv accordingly.

    echo 'eval "$(pyenv init -)"' >> ~/.zshrc
    echo 'eval "$(pyenv virtualenv-init -)"' >> ~/.zshrc
    source ~/.zshrc

    Check if the pyenv command is executed properly.

    pyenv --help
    $ pyenv --help
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    Installation - Ubuntu

    1. Install pyenv and pyenv-virtualenv
    curl https://pyenv.run | bash

    If the following content is output, it means that the installation is successful.

      % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
    Dload Upload Total Spent Left Speed
    0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239
    Cloning into '/home/mlops/.pyenv'...
    ...
    Skip...
    ...
    remote: Enumerating objects: 10, done.
    remote: Counting objects: 100% (10/10), done.
    remote: Compressing objects: 100% (6/6), done.
    remote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0
    Unpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.

    WARNING: seems you still have not added 'pyenv' to the load path.


    # See the README for instructions on how to set up
    # your shell environment for Pyenv.

    # Load pyenv-virtualenv automatically by adding
    # the following to ~/.bashrc:

    eval "$(pyenv virtualenv-init -)"

2. Set pyenv

    Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash.

    sudo vi ~/.bashrc

Add the following lines and save the file.

    export PATH="$HOME/.pyenv/bin:$PATH"
    eval "$(pyenv init -)"
    eval "$(pyenv virtualenv-init -)"

    Restart the shell.

    exec $SHELL

    Check if the pyenv command is executed properly.

    pyenv --help

    If the following message is displayed, it means that the settings have been configured correctly.

    $ pyenv
    pyenv 2.2.2
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    doctor Verify pyenv installation and development tools to build pythons.
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    Using pyenv

    Install python version

    Using the pyenv install <Python-Version> command, you can install the desired Python version.
In this page, we will install Python 3.7.12, the version used by Kubeflow by default, as an example.

    pyenv install 3.7.12

    If installed normally, the following message will be printed.

    $ pyenv install 3.7.12
    Downloading Python-3.7.12.tar.xz...
    -> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz
    Installing Python-3.7.12...
    patching file Doc/library/ctypes.rst
    patching file Lib/test/test_unicode.py
    patching file Modules/_ctypes/_ctypes.c
    patching file Modules/_ctypes/callproc.c
    patching file Modules/_ctypes/ctypes.h
    patching file setup.py
    patching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'
    patching file Modules/_decimal/libmpdec/mpdecimal.h
    Installed Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12

    Create python virtual environment

Use the pyenv virtualenv <Installed-Python-Version> <Virtual-Environment-Name> command to create a Python virtual environment with the desired Python version.

    For example, let's create a Python virtual environment called demo with Python 3.7.12 version.

    pyenv virtualenv 3.7.12 demo
    $ pyenv virtualenv 3.7.12 demo
    Looking in links: /tmp/tmpffqys0gv
    Requirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)
    Requirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)

    Activating python virtual environment

    Use the pyenv activate <environment name> command to use the virtual environment created in this way.

    For example, we will use a Python virtual environment called demo.

    pyenv activate demo

You can see the name of the current virtual environment displayed at the front of the shell prompt.

    Before

    mlops@ubuntu:~$ pyenv activate demo

    After

    pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.
    (demo) mlops@ubuntu:~$
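To double-check which interpreter is actually active, you can ask Python itself. Inside the demo environment both values should point at the 3.7.12 interpreter under ~/.pyenv (a quick check, assuming the environment created above):

import sys

print(sys.version)  # expect 3.7.12 inside the demo environment
print(sys.prefix)   # expect something like /home/mlops/.pyenv/versions/3.7.12/envs/demo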

    Deactivating python virtual environment

    You can deactivate the currently active virtualenv by using the command source deactivate.

    source deactivate

    Before

    (demo) mlops@ubuntu:~$ source deactivate

    After

    mlops@ubuntu:~$ 
| | Scheduling | Kubernetes |
| Security & Compliance | Authentication & Authorization | Ldap |
| | Data Encryption & Tokenization | Vault |
| | Governance & Auditing | Open Policy Agent |

    As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need it, it might be a good idea to refer to the following open source projects first.

    open-stacks-2.png

    For details:

| Mgmt. | Component | Open Source |
| --- | --- | --- |
| Data Mgmt. | Collection | Kafka |
| | Validation | Beam |
| | Feature Store | Flink |
| ML Model Dev. & Experiment | Modeling | Jupyter |
| | Analysis & Experiment Mgmt. | MLflow |
| | HPO Tuning & AutoML | Katib |
| Deploy Mgmt. | Serving Framework | Seldon Core |
| | A/B Test | Iter8 |
| | Monitoring | Grafana, Prometheus |
| Process Mgmt. | Pipeline | Kubeflow |
| | CI/CD | Github Action |
| | Continuous Training | Argo Events |
| Platform Mgmt. | Configuration Mgmt. | Consul |
| | Code Version Mgmt. | Github, Minio |
| | Logging | (EFK) Elastic Search, Fluentd, Kibana |
| | Resource Mgmt. | Kubernetes |
    Version: 1.0

    3. Components of MLOps

    Practitioners guide to MLOps

    Google's white paper [Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning] published in May 2021 mentions the following core functionalities of MLOps:

    mlops-component

    Let's look at what each feature does.

    1. Experimentation

    Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:

    • Integration with version control tools like Git and a notebook (Jupyter Notebook) environment
• Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics (see the sketch after this list)
    • Data and model analysis and visualization capabilities
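As a concrete illustration of the experiment tracking capability listed above, here is a minimal sketch using MLflow, the tracking component used elsewhere in this guide. The tracking URI, experiment name, parameter, and metric are illustrative values only.

import mlflow

# Illustrative values only; point the tracking URI at your own MLflow server.
mlflow.set_tracking_uri("http://192.168.35.102:5000")
mlflow.set_experiment("demo-experiment")

with mlflow.start_run():
    mlflow.log_param("n_estimators", 100)  # hyperparameter used for the run
    mlflow.log_metric("accuracy", 0.93)    # evaluation metric of the trained model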

    2. Data Processing

    Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:

    • Data connectors compatible with various data sources and services
    • Data encoders and decoders compatible with different data formats
    • Data transformation and feature engineering capabilities for different data types
    • Scalable batch and streaming data processing capabilities for training and serving

    3. Model Training

    Model Training offers functionalities to efficiently execute algorithms for model training:

    • Environment provisioning for ML framework execution
    • Distributed training environment for multiple GPUs and distributed training
    • Hyperparameter tuning and optimization capabilities

    4. Model Evaluation

    Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:

    • Model performance evaluation on evaluation datasets
    • Tracking prediction performance across different continuous training runs
    • Comparison and visualization of performance between different models
    • Model output interpretation using interpretable AI techniques

    5. Model Serving

    Model serving offers functionalities to deploy and serve models in production environments:

    • Low-latency and high-availability inference capabilities
    • Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)
    • Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results
    • Autoscaling capabilities to handle spiking inference requests
    • Logging of inference requests and results

    6. Online Experimentation

    Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models.

    • Canary and shadow deployment features
    • A/B testing capabilities
    • Multi-armed bandit testing functionality

    7. Model Monitoring

    Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates.

    8. ML Pipeline

    ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:

    • Pipeline execution through various event sources
    • ML metadata tracking and integration for pipeline parameter and artifact management
    • Support for built-in components for common ML tasks and user-defined components
    • Provisioning of different execution environments

    9. Model Registry

    The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository.

    • Registration, tracking, and versioning of trained and deployed models
    • Storage of information about the required data and runtime packages for deployment

    10. Dataset and Feature Repository

    • Sharing, search, reuse, and versioning capabilities for datasets
    • Real-time processing and low-latency serving capabilities for event streaming and online inference tasks
    • Support for various types of data, such as images, text, and tabular data

    11. ML Metadata and Artifact Tracking

    In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:

    • History management for ML artifacts
    • Tracking and sharing of experiments and pipeline parameter configurations
    • Storage, access, visualization, and download capabilities for ML artifacts
    • Integration with other MLOps functionalities
    - +
    Version: 1.0

    3. Components of MLOps

    Practitioners guide to MLOps

    Google's white paper [Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning] published in May 2021 mentions the following core functionalities of MLOps:

    mlops-component

    Let's look at what each feature does.

    1. Experimentation

    Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:

    • Integration with version control tools like Git and a notebook (Jupyter Notebook) environment
    • Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics
    • Data and model analysis and visualization capabilities

    2. Data Processing

    Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:

    • Data connectors compatible with various data sources and services
    • Data encoders and decoders compatible with different data formats
    • Data transformation and feature engineering capabilities for different data types
    • Scalable batch and streaming data processing capabilities for training and serving

    3. Model Training

    Model Training offers functionalities to efficiently execute algorithms for model training:

    • Environment provisioning for ML framework execution
    • Distributed training environment for multiple GPUs and distributed training
    • Hyperparameter tuning and optimization capabilities

    4. Model Evaluation

    Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:

    • Model performance evaluation on evaluation datasets
    • Tracking prediction performance across different continuous training runs
    • Comparison and visualization of performance between different models
    • Model output interpretation using interpretable AI techniques

    5. Model Serving

    Model serving offers functionalities to deploy and serve models in production environments:

    • Low-latency and high-availability inference capabilities
    • Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)
    • Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results
    • Autoscaling capabilities to handle spiking inference requests
    • Logging of inference requests and results

    6. Online Experimentation

    Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models.

    • Canary and shadow deployment features
    • A/B testing capabilities
    • Multi-armed bandit testing functionality

    7. Model Monitoring

    Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates.

    8. ML Pipeline

    ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:

    • Pipeline execution through various event sources
    • ML metadata tracking and integration for pipeline parameter and artifact management
    • Support for built-in components for common ML tasks and user-defined components
    • Provisioning of different execution environments

    9. Model Registry

    The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository.

    • Registration, tracking, and versioning of trained and deployed models
    • Storage of information about the required data and runtime packages for deployment
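With MLflow, for example, registering a trained model might look like the following minimal sketch. The registry name is an assumption, and it presumes the tracking server is backed by a store that supports the model registry.

import mlflow
import mlflow.sklearn
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

X, y = load_iris(return_X_y=True)
model = LogisticRegression(max_iter=200).fit(X, y)

with mlflow.start_run() as run:
    # Log the trained model as an artifact of this run.
    mlflow.sklearn.log_model(model, artifact_path="model")

# Register the logged model under a versioned name in the registry;
# "iris-classifier" is an illustrative name.
mlflow.register_model(f"runs:/{run.info.run_id}/model", "iris-classifier")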

    10. Dataset and Feature Repository

    • Sharing, search, reuse, and versioning capabilities for datasets
    • Real-time processing and low-latency serving capabilities for event streaming and online inference tasks
    • Support for various types of data, such as images, text, and tabular data

    11. ML Metadata and Artifact Tracking

    In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:

    • History management for ML artifacts
    • Tracking and sharing of experiments and pipeline parameter configurations
    • Storage, access, visualization, and download capabilities for ML artifacts
    • Integration with other MLOps functionalities
    Version: 1.0

    1. What is MLOps?

    Machine Learning Project

Since AlexNet in 2012, machine learning and deep learning have been adopted in virtually any domain where data exists, including Computer Vision and Natural Language Processing. Deep learning and machine learning were grouped together under the term AI, and many media outlets proclaimed the need for AI. Countless companies then ran projects using machine learning and deep learning. But what were the results? Byungchan Eum, Head of North East Asia at Element AI, said, "If 10 companies start an AI project, 9 of them end after only a proof of concept (POC)".

In this way, in many projects machine learning and deep learning merely showed the possibility that they could solve the problem, and then faded away. Around this time, predictions that another AI winter was coming also began to emerge.

Why did most projects end at the proof-of-concept (POC) stage? Because it is impossible to operate an actual service with machine learning and deep learning code alone.

    At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper Hidden Technical Debt in Machine Learning Systems. However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention.

After a few years, machine learning and deep learning had proven their potential, and people began looking to apply them to actual services. Many soon realized, however, that building actual services was not as easy as they had thought.

    Devops

    MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps.

    DevOps

    DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.

    Silo Effect

    Let's explore why DevOps is necessary through a simple scenario.

    In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service.

    However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team.

    When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality.

    silo

Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. Silos are designed to keep the stored materials separate and prevent them from mixing. In the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.

    The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations.

    CI/CD

    Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams.

    cicd

Through this method, the development team can understand the operational environment and check whether the features being developed can be deployed seamlessly. The operations team can deploy validated features or improved products more often, improving the customer's experience of the product. In summary, DevOps is a methodology for solving the problems between development teams and operations teams.

    MLOps

    1) ML + Ops

    DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience.

    MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.

    MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system.

    Rule-Based Approach

    In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification.

    Machine Learning Approach

    As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. In this case, the models are periodically retrained and redeployed.

    Deep Learning Approach

    When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed.

    By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner.

    graph

    If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend where the model performance improves as the complexity increases. This often leads to the emergence of separate machine learning teams specializing in transitioning from traditional machine learning to deep learning.

    If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge.

    Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively.

    2) ML -> Ops

    However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects.

    In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary.

    3) Conclusion

    In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models.

    Version: 1.0

    2. Levels of MLOps

    This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are.

    Hidden Technical Debt in ML System

    Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google.

    paper

    The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning.

Google has since expanded on the ideas in this paper and developed them into what it now calls MLOps. More details can be found on the Google Cloud homepage. In this post, we will try to explain what Google means by MLOps.

Google divided the evolution of MLOps into three stages (levels 0-2). Before explaining each stage, let's review some of the concepts described in the previous post.

    In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD.

    Level 0: Manual Process

    level-0

At level 0, the two teams communicate through a "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operations team. The operations team then deploys the model delivered in this way.

    toon

Initial machine learning models are deployed through this model-centered communication. However, this delivery method has several problems. For example, if some parts were developed with Python 3.7 and others run on Python 3.8, we often see the following situation.

    The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:

    1. Python code
    2. Trained weights
    3. Environment (Packages, versions)

    If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used.
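A minimal sketch of handing over all three pieces together might look like this: the code lives in the repository, the trained weights are serialized to a file, and the exact package and interpreter versions are recorded next to them. The file names here are assumptions for illustration.

import sys
from importlib.metadata import version

import joblib
from sklearn.datasets import load_iris
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)
model = SVC().fit(X, y)                     # 1. Python code (this script)

joblib.dump(model, "model.joblib")          # 2. trained weights

with open("requirements.txt", "w") as f:    # 3. environment (pinned versions)
    for package in ("scikit-learn", "joblib"):
        f.write(f"{package}=={version(package)}\n")
print("python", sys.version.split()[0])     # the interpreter version matters too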

    In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed features increases and communication becomes more challenging, it becomes difficult to deploy models with better performance quickly.

    Level 1: Automated ML Pipeline

    Pipeline

    level-1-pipeline

    So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn't work due to differences in the environment.

    However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.

    Continuous Training

    level-1-ct.png

    And the concept of Continuous Training (CT) is added. So why is CT necessary?

    Auto Retrain

    In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.

    Auto Deploy

    However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot.

For example, suppose a prediction model was created for car model A on an automotive production line. When an entirely different car model B is introduced, its data patterns have never been seen before, so a new model is trained for car model B.

Now the system makes predictions for car model B. But what should be done if production switches back to car model A? If there is only a retraining rule, a new model for car model A will be trained from scratch. However, machine learning models need a sufficient amount of data to reach satisfactory performance, and the term "blind spot" refers to the period in which no working model is available while that data is being gathered.

There is a simple solution to this blind spot: check whether a previous model for car model A exists and, if so, use that previous model for prediction instead of immediately training a new one. Switching models automatically in this way, based on the metadata associated with each model, is known as Auto Deploy.
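The decision logic described here can be sketched in a few lines. The registry below is just a dictionary keyed by product, standing in for a real model registry and its metadata; all names are illustrative.

from typing import Callable, Dict

def resolve_model(
    product: str,
    registry: Dict[str, object],
    train_fn: Callable[[str], object],
    has_enough_data: bool,
):
    if product in registry:
        # Auto Deploy: a model for this product already exists, so switch
        # back to it instead of retraining and sitting in a blind spot.
        return registry[product]
    if has_enough_data:
        # Auto Retrain: no previous model, but enough data has accumulated.
        model = train_fn(product)
        registry[product] = model
        return model
    raise RuntimeError(f"No model for {product!r} and not enough data to train one yet")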

    To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously.

    Level 2: Automating the CI/CD Pipeline

    level-2

Level 2 is about automating CI and CD. In DevOps, the focus of CI/CD is on source code. So what is the focus of CI/CD in MLOps?

    In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline.

    Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training.

    In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly.
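In practice, such a check is often just a test that re-runs a small version of the training pipeline whenever the code or the pinned package versions change. The pytest-style sketch below is illustrative, and the accuracy threshold is an assumption.

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score

def test_training_pipeline_still_works():
    # Retrain a small model end to end; if an updated package silently
    # changed behavior, this is where it should surface.
    X, y = load_iris(return_X_y=True)
    scores = cross_val_score(LogisticRegression(max_iter=200), X, y, cv=5)
    assert scores.mean() > 0.9  # illustrative minimum-quality bar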

    In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model.

    Version: 1.0

    4. Why Kubernetes?

    MLOps & Kubernetes

    When talking about MLOps, why is the word Kubernetes always heard together?

To build a successful MLOps system, the various components described in Components of MLOps are needed, but operating them together organically at the infrastructure level raises many issues to solve: for example, simply running a large number of machine learning model requests in order, guaranteeing the same execution environment across different workspaces, and responding quickly when a deployed service fails.

This is where the need for containers and container orchestration systems appears. With the introduction of a container orchestration system such as Kubernetes, execution environments can be isolated and managed efficiently. It helps prevent situations such as "Is anyone using cluster 1?", "Who killed my process that was using the GPU?", or "Who updated package x on the cluster?" when a few developers share a small number of clusters to develop and deploy machine learning models.

    Container

What is a container, then? Microsoft defines a container as follows.

    Container: Standardized, portable packaging of an application's code, libraries, and configuration files

    But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python execution environment, package version, etc. To prevent this, the technology used to share and execute the entire dependent execution environment with the source code used in machine learning is called containerization technology. This packaged form is called a container image, and by sharing the container image, users can ensure the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the source code and requirements.txt file of the model, but the entire container image with the execution environment, you can avoid situations such as "It works on my notebook, why not yours?".

One of the common misunderstandings made by people who are new to containers is to assume that "container == Docker". Docker does not mean the same thing as a container; rather, it is a tool that makes containers easier and more flexible to use, providing features such as launching containers and creating and sharing container images. In summary, a container is a virtualization technology, and Docker is one implementation of that technology.

However, among the various container virtualization tools, Docker quickly became mainstream thanks to its ease of use and efficiency, so when people think of containers they often think of Docker automatically. There are various reasons why the container and Docker ecosystem became mainstream, but since the technical details are outside the scope of MLOps for ALL, we won't go into them here.

    Container Orchestration System

    Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.

    In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues.

    For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations.

    A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier.

    How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal.
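For instance, asking the cluster to run a training container on a node with a free GPU boils down to declaring the resource in the pod spec. The sketch below uses the official Kubernetes Python client; the image name and namespace are assumptions for illustration.

from kubernetes import client, config

config.load_kube_config()  # assumes a local kubeconfig with cluster access

pod = client.V1Pod(
    metadata=client.V1ObjectMeta(name="train-job"),
    spec=client.V1PodSpec(
        restart_policy="Never",
        containers=[
            client.V1Container(
                name="trainer",
                image="ghcr.io/example/train:latest",  # hypothetical training image
                resources=client.V1ResourceRequirements(
                    limits={"nvidia.com/gpu": "1"}  # let the scheduler pick a GPU node
                ),
            )
        ],
    ),
)
client.CoreV1Api().create_namespaced_pod(namespace="default", body=pod)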

    As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems.

    According to the survey released by CNCF in 2018, Kubernetes was already showing its prominence. The survey published in 2019 indicates that 78% of respondents were using Kubernetes at a production level.

    k8s-graph

    The growth of the Kubernetes ecosystem can be attributed to various reasons. However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.

    Version: 1.0

6. Kubeflow Pipelines Related

    In the left tabs of the Central Dashboard (KFP Experiments, Pipelines, Runs, Recurring Runs, Artifacts, Executions) you can manage Kubeflow Pipelines and the results of Pipeline execution and Pipeline Runs.

    left-tabs

    Kubeflow Pipelines are the main reason for using Kubeflow in MLOps for ALL, and details on how to create, execute, and check the results of Kubeflow Pipelines can be found in 3.Kubeflow.

    Version: 1.0

    5. Experiments(AutoML)

    Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard.

    left-tabs

    automl

    The Experiments(AutoML) page is where you can manage Katib, which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow.

    The usage of Katib and Experiments(AutoML) is not covered in MLOps for Everyone v1.0, and will be added in v2.0.

    Version: 1.0

    1. Central Dashboard

    Once you have completed Kubeflow installation, you can access the dashboard through the following command.

    kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80

    after-login

The Central Dashboard is a UI that integrates all the features provided by Kubeflow. The features provided by the Central Dashboard can be grouped by the tabs on the left side:

    left-tabs

    • Home
    • Notebooks
    • Tensorboards
    • Volumes
    • Models
    • Experiments(AutoML)
    • Experiments(KFP)
    • Pipelines
    • Runs
    • Recurring Runs
    • Artifacts
    • Executions

    Let's now look at the simple usage of each feature.

    Version: 1.0

    2. Notebooks

    Launch Notebook Server

    Click on the Notebooks tab on the left side of the Central Dashboard.

    left-tabs

    You will see a similar screen.

The Notebooks tab is a page where users can independently create and access Jupyter Notebook and code-server environments (hereinafter referred to as notebook servers).

    notebook-home

    Click the "+ NEW NOTEBOOK" button at the top right.

    new-notebook

When the screen shown below appears, specify the spec of the notebook server to be created.

    create

Details of each spec field:
    • name:
      • Specifies a name to identify the notebook server.
    • namespace:
      • Cannot be changed. (It is automatically set to the namespace of the currently logged-in user account.)
    • Image:
      • Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.
        • If you want to use an image that utilizes GPU within the notebook server, refer to the GPUs section below.
      • If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use.
    • CPU / RAM:
      • Specifies the amount of resources required.
        • cpu: in core units
          • Represents the number of virtual cores, and can also be specified as a float value such as 1.5, 2.7, etc.
        • memory: in Gi units
    • GPUs:
      • Specifies the number of GPUs to allocate to the Jupyter notebook.
        • None
          • When GPU resources are not required.
        • 1, 2, 4
          • Allocates 1, 2, or 4 GPUs.
      • GPU Vendor:
        • If you have followed the (Optional) Setup GPU guide and installed the NVIDIA GPU plugin, select NVIDIA.
    • Workspace Volume:
      • Specifies the amount of disk space required within the notebook server.
      • Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.
        • Check the "Don't use Persistent Storage for User's home" checkbox only if it is not necessary to save the notebook server's work. It is generally recommended not to check this option.
        • If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.
    • Data Volumes:
      • If additional storage resources are required, click the "+ ADD VOLUME" button to create them.
    • Configurations, Affinity/Tolerations, Miscellaneous Settings
      • These are generally not needed, so detailed explanations are omitted in MLOps for All.

    If you followed the Setup GPU (Optional), select NVIDIA if you have installed the nvidia gpu plugin.

    creating

After creation, the Status will change to a green check mark icon, and the CONNECT button will be activated.

created


    Accessing the Notebook Server

    Clicking the CONNECT button will open a new browser window, where you will see the following screen:

    notebook-access

    You can use the Notebook, Console, and Terminal icons in the Launcher to start using them.

    Notebook Interface

    notebook-console

    Terminal Interface

    terminal-console


    Stopping the Notebook Server

    If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server.
If you haven't changed the path during notebook server creation, the default Workspace Volume path is /home/jovyan inside the notebook server, so any data stored outside the /home/jovyan directory will be deleted.

    Clicking the STOP button as shown below will stop the notebook server:

    notebook-stop

    Once the server is stopped, the CONNECT button will be disabled. To restart the notebook server and use it again, click the PLAY button.

    notebook-restart

    Version: 1.0

    3. Tensorboards

    Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next.

    left-tabs

    We can see the following screen.

    tensorboard

    The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can be used for the purpose of storing data directly from a Kubeflow Pipeline run for visualization purposes.

    You can refer to the TensorBoard documentation for more information on using TensorBoard with Kubeflow Pipeline runs.

    There are various ways to visualize the results of Kubeflow Pipeline runs, and in MLOps for ALL, we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. Therefore, detailed explanations of the TensorBoards page will be omitted in this context.

    Version: 1.0

    4. Volumes

    Volumes

    Next, let's click on the Volumes tab in the left of the Central Dashboard.

    left-tabs

    You will see the following screen.

    volumes

The Volumes tab provides functionality to manage Kubernetes Volumes, specifically the Persistent Volume Claims (PVCs) that belong to the current user's namespace.

    By looking at the screenshot, you can see the information of the Volume created on the 1. Notebooks page. It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation.

    In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace.


    Creating a Volume

    By clicking the + NEW VOLUME button at the top right, you can see the following screen.

    new-volume

    You can create a volume by specifying its name, size, storage class, and access mode.

When you specify the desired resource specs and create a volume, its Status will be shown as Pending on this page. When you hover over the Status icon, you will see the message "This volume will be bound when its first consumer is created."
This is due to the volume binding policy of the StorageClass used in this guide, local-path, and is not a problem.
Even while the Status is shown as Pending on this page, you can still specify the volume's name in the notebook server or pod that will use it, and the volume will be bound at that point.

    creating-volume

All the _path suffixes have disappeared from the arguments received in the input and output.
    We can see that instead of accessing iris_data.outputs["data_path"], we are accessing iris_data.outputs["data"].
    This happens because Kubeflow has a rule that paths created with InputPath and OutputPath can be accessed without the _path suffix when accessed from the pipeline.

    However, if you upload the pipeline just written, it will not run.
The reason is explained on the next page.

Kubeflow uses Kubernetes, so the component wrapper runs the component content on its own separate container.

    In detail, the image specified in the generated train_from_csv.yaml is image: python:3.7.

Some people may already have noticed why it does not run.

The python:3.7 image does not have the packages we want to use, such as dill, pandas, and sklearn, installed. Therefore, execution fails with an error indicating that the packages cannot be found.

    So, how can we add the packages?

    Adding packages

When converting a function into a Kubeflow component, there are two ways to add packages:

    1. Using base_image
    2. Using package_to_install

Let's check what arguments create_component_from_func, the function used to build the components, can receive.

def create_component_from_func(
    func: Callable,
    output_component_file: Optional[str] = None,
    base_image: Optional[str] = None,
    packages_to_install: List[str] = None,
    annotations: Optional[Mapping[str, str]] = None,
):
    • func: Function that creates the component wrapper to be made into a component.
    • base_image: Image that the component wrapper will run on.
    • packages_to_install: Additional packages that need to be installed for the component to use.

    1. base_image

Looking more closely, a component is executed in the following sequence:

    1. docker pull base_image
    2. pip install packages_to_install
    3. run command

    If the base_image used by the component already has all the packages installed, you can use it without installing additional packages.

    For example, on this page we are going to write a Dockerfile like this:

    FROM python:3.7

    RUN pip install dill pandas scikit-learn

Let's build the image using the Dockerfile above. The container registry we will use for this practice is ghcr (GitHub Container Registry).
You can choose a registry according to your environment and push the image there.

    docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image
    docker push ghcr.io/mlops-for-all/base-image

    Now let's try inputting the base image.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func

@partial(
    create_component_from_func,
    base_image="ghcr.io/mlops-for-all/base-image:latest",
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)

if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

If you compile the component with this script, the generated file looks as follows.

    name: Train from csv
    inputs:
    - {name: train_data, type: csv}
    - {name: train_target, type: csv}
    - {name: kernel, type: String}
    outputs:
    - {name: model, type: dill}
    implementation:
    container:
    image: ghcr.io/mlops-for-all/base-image:latest
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    kernel,
    ):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

    We can confirm that the base_image has been changed to the value we have set.
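
As a side note, the saved train_from_csv.yaml can also be reused without redefining the Python function. A minimal sketch, assuming the file sits in the current directory and the kfp v1 SDK is installed:

from kfp.components import load_component_from_file

# Recreate the component factory from the saved spec;
# it can then be called inside a pipeline just like train_from_csv.
train_from_csv_op = load_component_from_file("train_from_csv.yaml")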

    2. packages_to_install

However, building a new Docker image every time a package is added takes a lot of time.
In such cases, we can use the packages_to_install argument to easily add packages to the container.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)


if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

    If you execute the script, the train_from_csv.yaml file will be generated.

    name: Train from csv
    inputs:
    - {name: train_data, type: csv}
    - {name: train_target, type: csv}
    - {name: kernel, type: String}
    outputs:
    - {name: model, type: dill}
    implementation:
    container:
    image: python:3.7
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
    'scikit-learn==1.0.1' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    kernel,
    ):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

    If we take a closer look at the order in which the components written above are executed, it looks like this:

    1. docker pull python:3.7
    2. pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1
    3. run command

If we examine the generated yaml file closely, we can see that the following lines have been added automatically, so the necessary packages are installed and the program runs without errors.

        command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
    'scikit-learn==1.0.1' --user) && "$0" "$@"
diff --git a/en/docs/1.0/kubeflow/advanced-mlflow/index.html b/en/docs/1.0/kubeflow/advanced-mlflow/index.html

At this point, configure the MLflow endpoint of the upload component so that it connects to the MLflow server we installed.
In this case, use the Kubernetes Service DNS name of the MinIO that was set up when installing the MLflow server. Since this service was created in the kubeflow namespace with the name minio-service, set it to http://minio-service.kubeflow.svc:9000.
Similarly, for the tracking_uri address, use the Kubernetes Service DNS name of the MLflow server and set it to http://mlflow-server-service.mlflow-system.svc:5000.
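
Both addresses follow the same in-cluster naming pattern, <service-name>.<namespace>.svc:<port>. A tiny, purely illustrative helper (not part of the tutorial code) makes the pattern explicit:

def cluster_dns(service: str, namespace: str, port: int) -> str:
    # Kubernetes Service DNS name as reachable from inside the cluster.
    return f"http://{service}.{namespace}.svc:{port}"


assert cluster_dns("minio-service", "kubeflow", 9000) == "http://minio-service.kubeflow.svc:9000"
assert cluster_dns("mlflow-server-service", "mlflow-system", 5000) == "http://mlflow-server-service.mlflow-system.svc:5000"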

from functools import partial
from kfp.components import InputPath, create_component_from_func

@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
        clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
        input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
        signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
        conda_env = dill.load(file_reader)

    save_model(
        sk_model=clf,
        path=model_name,
        serialization_format="cloudpickle",
        conda_env=conda_env,
        signature=signature,
        input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)
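
Because the component logs the saved model directory as a run artifact, it can later be loaded back outside the pipeline. A minimal sketch, assuming the same endpoint, credentials, and network access as above; the run id and model name are placeholders you would take from the MLflow UI:

import os

import mlflow
import mlflow.sklearn

os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"
mlflow.set_tracking_uri("http://mlflow-server-service.mlflow-system.svc:5000")

run_id = "<run id from the MLflow UI>"        # placeholder
model_name = "<model_name used in the run>"   # placeholder
clf = mlflow.sklearn.load_model(f"runs:/{run_id}/{model_name}")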

    MLFlow Pipeline

    Now let's connect the components we have written and create a pipeline.

    Data Component

The data we will use to train the model is sklearn's iris dataset.
We will write a component that generates the data.

from functools import partial

from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    Pipeline

    The pipeline code can be written as follows.

from kfp.dsl import pipeline


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
    iris_data = load_iris_data()
    model = train_from_csv(
        train_data=iris_data.outputs["data"],
        train_target=iris_data.outputs["target"],
        kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
        model_name=model_name,
        model=model.outputs["model"],
        input_example=model.outputs["input_example"],
        signature=model.outputs["signature"],
        conda_env=model.outputs["conda_env"],
    )

    Run

    If you organize the components and pipelines written above into a single Python file, it would look like this.

from functools import partial

import kfp
from kfp.components import InputPath, OutputPath, create_component_from_func
from kfp.dsl import pipeline


@partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
)
def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)


@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
        dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
        dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
        additional_pip_deps=["dill", "pandas", "scikit-learn"]
    )
    with open(conda_env_path, "wb") as file_writer:
        dill.dump(conda_env, file_writer)


@partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
)
def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
        clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
        input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
        signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
        conda_env = dill.load(file_reader)

    save_model(
        sk_model=clf,
        path=model_name,
        serialization_format="cloudpickle",
        conda_env=conda_env,
        signature=signature,
        input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)


@pipeline(name="mlflow_pipeline")
def mlflow_pipeline(kernel: str, model_name: str):
    iris_data = load_iris_data()
    model = train_from_csv(
        train_data=iris_data.outputs["data"],
        train_target=iris_data.outputs["target"],
        kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
        model_name=model_name,
        model=model.outputs["model"],
        input_example=model.outputs["input_example"],
        signature=model.outputs["signature"],
        conda_env=model.outputs["conda_env"],
    )


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")

    mlflow_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: mlflow-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-19T14:14:11.999807',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "kernel", "type":
    "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}
    spec:
    entrypoint: mlflow-pipeline
    templates:
    - name: load-iris-data
    container:
    args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location 'pandas' 'scikit-learn' --user)
    && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def load_iris_data(
    data_path,
    target_path,
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    import argparse
    _parser = argparse.ArgumentParser(prog='Load iris data', description='')
    _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = load_iris_data(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: load-iris-data-data, path: /tmp/outputs/data/data}
    - {name: load-iris-data-target, path: /tmp/outputs/target/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''pandas'' ''scikit-learn'' ||
    PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    ''pandas'' ''scikit-learn'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef load_iris_data(\n data_path,\n target_path,\n):\n import
    pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data
    = pd.DataFrame(iris[\"data\"], columns=iris[\"feature_names\"])\n target
    = pd.DataFrame(iris[\"target\"], columns=[\"target\"])\n\n data.to_csv(data_path,
    index=False)\n target.to_csv(target_path, index=False)\n\nimport argparse\n_parser
    = argparse.ArgumentParser(prog=''Load iris data'', description='''')\n_parser.add_argument(\"--data\",
    dest=\"data_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--target\", dest=\"target_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = load_iris_data(**_parsed_args)\n"],
    "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":
    "data", "type": "csv"}, {"name": "target", "type": "csv"}]}', pipelines.kubeflow.org/component_ref: '{}'}
    - name: mlflow-pipeline
    inputs:
    parameters:
    - {name: kernel}
    - {name: model_name}
    dag:
    tasks:
    - {name: load-iris-data, template: load-iris-data}
    - name: train-from-csv
    template: train-from-csv
    dependencies: [load-iris-data]
    arguments:
    parameters:
    - {name: kernel, value: '{{inputs.parameters.kernel}}'}
    artifacts:
    - {name: load-iris-data-data, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}'}
    - {name: load-iris-data-target, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}'}
    - name: upload-sklearn-model-to-mlflow
    template: upload-sklearn-model-to-mlflow
    dependencies: [train-from-csv]
    arguments:
    parameters:
    - {name: model_name, value: '{{inputs.parameters.model_name}}'}
    artifacts:
    - {name: train-from-csv-conda_env, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}'}
    - {name: train-from-csv-input_example, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}'}
    - {name: train-from-csv-model, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}'}
    - {name: train-from-csv-signature, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}'}
    - name: train-from-csv
    container:
    args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,
    --kernel, '{{inputs.parameters.kernel}}', --model, /tmp/outputs/model/data,
    --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,
    --conda-env, /tmp/outputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    kernel,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["dill", "pandas", "scikit-learn"]
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: kernel}
    artifacts:
    - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}
    - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}
    outputs:
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/outputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",
    {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",
    {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},
    "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":
    "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''dill'' ''pandas''
    ''scikit-learn'' ''mlflow'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
    pip install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n):\n import
    dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from
    mlflow.models.signature import infer_signature\n from mlflow.utils.environment
    import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target
    = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data,
    train_target)\n\n with open(model_path, mode=\"wb\") as file_writer:\n dill.dump(clf,
    file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path,
    \"wb\") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature
    = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path,
    \"wb\") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env
    = _mlflow_conda_env(\n additional_pip_deps=[\"dill\", \"pandas\",
    \"scikit-learn\"]\n )\n with open(conda_env_path, \"wb\") as file_writer:\n dill.dump(conda_env,
    file_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Train
    from csv'', description='''')\n_parser.add_argument(\"--train-data\", dest=\"train_data_path\",
    type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--train-target\",
    dest=\"train_target_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--kernel\",
    dest=\"kernel\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\", dest=\"input_example_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\", dest=\"conda_env_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = train_from_csv(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},
    {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],
    "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},
    {"name": "input_example", "type": "dill"}, {"name": "signature", "type":
    "dill"}, {"name": "conda_env", "type": "dill"}]}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"kernel": "{{inputs.parameters.kernel}}"}'}
    - name: upload-sklearn-model-to-mlflow
    container:
    args: [--model-name, '{{inputs.parameters.model_name}}', --model, /tmp/inputs/model/data,
    --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,
    --conda-env, /tmp/inputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' 'boto3' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' 'boto3' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def upload_sklearn_model_to_mlflow(
    model_name,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)

    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)

    import argparse
    _parser = argparse.ArgumentParser(prog='Upload sklearn model to mlflow', description='')
    _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: model_name}
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/inputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":
    "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",
    {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' ''boto3'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install
    --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn'' ''mlflow''
    ''boto3'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n):\n import
    os\n import dill\n from mlflow.sklearn import save_model\n\n from
    mlflow.tracking.client import MlflowClient\n\n os.environ[\"MLFLOW_S3_ENDPOINT_URL\"]
    = \"http://minio-service.kubeflow.svc:9000\"\n os.environ[\"AWS_ACCESS_KEY_ID\"]
    = \"minio\"\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"minio123\"\n\n client
    = MlflowClient(\"http://mlflow-server-service.mlflow-system.svc:5000\")\n\n with
    open(model_path, mode=\"rb\") as file_reader:\n clf = dill.load(file_reader)\n\n with
    open(input_example_path, \"rb\") as file_reader:\n input_example
    = dill.load(file_reader)\n\n with open(signature_path, \"rb\") as file_reader:\n signature
    = dill.load(file_reader)\n\n with open(conda_env_path, \"rb\") as file_reader:\n conda_env
    = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format=\"cloudpickle\",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run
    = client.create_run(experiment_id=\"0\")\n client.log_artifact(run.info.run_id,
    model_name)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Upload
    sklearn model to mlflow'', description='''')\n_parser.add_argument(\"--model-name\",
    dest=\"model_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\",
    dest=\"input_example_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\",
    dest=\"conda_env_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},
    {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},
    {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],
    "name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
    arguments:
    parameters:
    - {name: kernel}
    - {name: model_name}
    serviceAccountName: pipeline-runner

After running the script to generate the mlflow_pipeline.yaml file, upload the pipeline, execute it, and check the results of the run.
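
Instead of uploading the file through the web UI, you can also submit it from Python. A minimal sketch, assuming you can reach the Kubeflow Pipelines API from where you run it; the host value below is an assumption, and multi-user Kubeflow deployments usually need additional authentication that is omitted here:

import kfp

# Hypothetical endpoint, e.g. exposed via a port-forward to the Kubeflow Pipelines UI service.
client = kfp.Client(host="http://localhost:8080")

client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "svc"},
    run_name="mlflow-pipeline-example",
)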

    mlflow-svc-0

    Port-forward the mlflow service to access the MLflow UI.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000. You will see that the run has been created, as shown below.

    mlflow-svc-1

Click on the run to verify that the trained model file is present.

    mlflow-svc-2

    _parsed_args = vars(_parser.parse_args())

    _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: model_name}
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/inputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":
    "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",
    {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' ''boto3'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install
    --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn'' ''mlflow''
    ''boto3'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n):\n import
    os\n import dill\n from mlflow.sklearn import save_model\n\n from
    mlflow.tracking.client import MlflowClient\n\n os.environ[\"MLFLOW_S3_ENDPOINT_URL\"]
    = \"http://minio-service.kubeflow.svc:9000\"\n os.environ[\"AWS_ACCESS_KEY_ID\"]
    = \"minio\"\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"minio123\"\n\n client
    = MlflowClient(\"http://mlflow-server-service.mlflow-system.svc:5000\")\n\n with
    open(model_path, mode=\"rb\") as file_reader:\n clf = dill.load(file_reader)\n\n with
    open(input_example_path, \"rb\") as file_reader:\n input_example
    = dill.load(file_reader)\n\n with open(signature_path, \"rb\") as file_reader:\n signature
    = dill.load(file_reader)\n\n with open(conda_env_path, \"rb\") as file_reader:\n conda_env
    = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format=\"cloudpickle\",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run
    = client.create_run(experiment_id=\"0\")\n client.log_artifact(run.info.run_id,
    model_name)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Upload
    sklearn model to mlflow'', description='''')\n_parser.add_argument(\"--model-name\",
    dest=\"model_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\",
    dest=\"input_example_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\",
    dest=\"conda_env_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},
    {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},
    {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],
    "name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
    arguments:
    parameters:
    - {name: kernel}
    - {name: model_name}
    serviceAccountName: pipeline-runner

After execution generates the mlflow_pipeline.yaml file, upload the pipeline and run it, then check the results of the run.

    mlflow-svc-0
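If you prefer not to use the dashboard for this step, a run can also be created from code with the kfp SDK. The following is only a sketch: the host address assumes the dashboard is port-forwarded to localhost:8080, and the argument values for kernel and model_name are illustrative, not taken from the original.

import kfp

# Assumed endpoint behind the port-forward; adjust to your Kubeflow installation.
client = kfp.Client(host="http://localhost:8080/pipeline")

# Create a one-off run directly from the compiled package.
client.create_run_from_pipeline_package(
    "mlflow_pipeline.yaml",
    arguments={"kernel": "rbf", "model_name": "sklearn-svc"},  # example values
)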

    Port-forward the mlflow service to access the MLflow UI.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

Open a web browser and go to localhost:5000. You will then see that a run has been created, as shown below.

    mlflow-svc-1

Click on the run to verify that the trained model file is present.

    mlflow-svc-2
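The same check can be done from code by loading the model back from the MLflow server. This is only a sketch: the run id placeholder, endpoints, and credentials are assumptions that mirror the values used in the components above, and they assume MinIO and the MLflow server are reachable locally (for example via kubectl port-forward).

import os

import mlflow
from mlflow.sklearn import load_model

# Assumed local endpoints for MinIO and the MLflow tracking server.
os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://localhost:9000"
os.environ["AWS_ACCESS_KEY_ID"] = "minio"
os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

mlflow.set_tracking_uri("http://localhost:5000")

run_id = "<run-id-from-the-ui>"  # placeholder: copy the run id shown in the MLflow UI
model = load_model(f"runs:/{run_id}/sklearn-svc")  # assumes the model_name argument was "sklearn-svc"
print(model)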

diff --git a/en/docs/1.0/kubeflow/advanced-pipeline/index.html b/en/docs/1.0/kubeflow/advanced-pipeline/index.html

If you need to train a model using a GPU and the Kubernetes environment doesn't allocate a GPU, the training may not be performed correctly.
    To address this, you can use the set_gpu_limit() attribute to set the GPU limit.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

If you execute the above script and look closely at sum-and-print-numbers in the generated file, you can see that a resources section with {nvidia.com/gpu: 1} has been added. This is how a GPU is allocated to the component.

  - name: sum-and-print-numbers
    container:
      args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
        '{{inputs.parameters.print-and-return-number-2-Output}}']
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def sum_and_print_numbers(number_1, number_2):
            print(number_1 + number_2)

        import argparse
        _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
        _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
        _parsed_args = vars(_parser.parse_args())

        _outputs = sum_and_print_numbers(**_parsed_args)
      image: python:3.7
      resources:
        limits: {nvidia.com/gpu: 1}
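set_gpu_limit() only adds the resource limit. If your cluster runs GPU workloads on labeled nodes, the step can additionally be pinned with a node selector. The following is a hedged sketch of how the step inside example_pipeline above could be extended; the label key and value are assumptions (they follow the common GKE convention), so replace them with whatever label your GPU nodes actually carry.

    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name(
        "This is sum of number 1 and number 2"
    ).set_gpu_limit(1).add_node_selector_constraint(
        # assumed node label; use the label your cluster applies to GPU nodes
        "cloud.google.com/gke-accelerator", "nvidia-tesla-k80"
    )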

    CPU

The number of CPUs can be set using the .set_cpu_limit() attribute.
The difference from GPUs is that the input must be a string, not an int.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below.

      resources:
        limits: {nvidia.com/gpu: 1, cpu: '16'}

    Memory

    Memory can be set using the .set_memory_limit() attribute.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

The changed part is as follows.

      resources:
        limits: {nvidia.com/gpu: 1, memory: 1G}
diff --git a/en/docs/1.0/kubeflow/advanced-run/index.html b/en/docs/1.0/kubeflow/advanced-run/index.html

Graph, Run Output, and Config.

    advanced-run-0.png

    Graph

    advanced-run-1.png

In the graph, if you click on a component, you can check the execution information of that component.

    Input/Output

    The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components.

    Logs

In the Logs tab, you can view all the stdout output generated during the execution of the Python code. However, pods are deleted after a certain period of time, so the logs may no longer be viewable in this tab. In that case, you can check them in the main-logs section of the Output artifacts.

    Visualizations

    The Visualizations tab displays plots generated by the components.

To generate a plot, save the values you want to display to the argument declared as mlpipeline_ui_metadata: OutputPath("UI_Metadata"). The plot should be in HTML format. The conversion process is as follows.


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(
    mlpipeline_ui_metadata: OutputPath("UI_Metadata")
):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)

Written as a complete pipeline, it looks like this.

from functools import partial

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)


@pipeline(name="plot_pipeline")
def plot_pipeline():
    plot_linear()


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")

    If you run this script and check the resulting plot_pipeline.yaml, you will see the following.

    plot_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: plot-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-17T13:31:32.963214',
    pipelines.kubeflow.org/pipeline_spec: '{"name": "plot_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}
    spec:
    entrypoint: plot-pipeline
    templates:
    - name: plot-linear
    container:
    args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'matplotlib' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet
    --no-warn-script-location 'matplotlib' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path
    def plot_linear(mlpipeline_ui_metadata):
    import base64
    import json
    from io import BytesIO
    import matplotlib.pyplot as plt
    plt.plot([1, 2, 3], [1, 2, 3])
    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")
    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
    "outputs": [
    {
    "type": "web-app",
    "storage": "inline",
    "source": html,
    },
    ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
    json.dump(metadata, html_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Plot linear', description='')
    _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())
    _outputs = plot_linear(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.9
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''matplotlib'' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''matplotlib''
    --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef plot_linear(mlpipeline_ui_metadata):\n import
    base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot
    as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile,
    format=\"png\")\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\"utf-8\")\n\n html
    = f\"<img src=''data:image/png;base64,{encoded}''>\"\n metadata = {\n \"outputs\":
    [\n {\n \"type\": \"web-app\",\n \"storage\":
    \"inline\",\n \"source\": html,\n },\n ],\n }\n with
    open(mlpipeline_ui_metadata, \"w\") as html_writer:\n json.dump(metadata,
    html_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Plot
    linear'', description='''')\n_parser.add_argument(\"--mlpipeline-ui-metadata\",
    dest=\"mlpipeline_ui_metadata\", type=_make_parent_dirs_and_return_path,
    required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs
    = plot_linear(**_parsed_args)\n"], "image": "python:3.7"}}, "name": "Plot
    linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}',
    pipelines.kubeflow.org/component_ref: '{}'}
    - name: plot-pipeline
    dag:
    tasks:
    - {name: plot-linear, template: plot-linear}
    arguments:
    parameters: []
    serviceAccountName: pipeline-runner

    After running, click Visualization.

    advanced-run-5.png
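The web-app type used above is not the only option; the same metadata file can carry other viewer types, such as inline Markdown. The following is a hedged sketch assuming the "markdown" output type of the Kubeflow Pipelines UI metadata schema behaves the same way as the HTML example.

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def report_markdown(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import json

    # Inline Markdown rendered by the Visualizations tab (assumed "markdown" viewer type).
    metadata = {
        "outputs": [
            {
                "type": "markdown",
                "storage": "inline",
                "source": "# Training report\n* the pipeline finished without errors",
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as writer:
        json.dump(metadata, writer)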

    Run output

    advanced-run-2.png

Run output is where Kubeflow gathers the artifacts generated in the specified format and displays the evaluation metrics.

To display a metric, save the name and value you want to show to the argument declared as mlpipeline_metrics_path: OutputPath("Metrics") in JSON format. For example, you can write it like this.

@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json

    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)

We will add a component that generates evaluation metrics to the pipeline created in the Pipeline page and execute it. The whole pipeline is as follows.

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int) -> int:
    sum_number = number_1 + number_2
    print(sum_number)
    return sum_number


@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json

    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )
    show_metric_of_sum(sum_result.output)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

After execution, click Run Output and it will be displayed as follows.

    advanced-run-4.png
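A metric entry can also carry an optional format field; as far as we know the UI accepts "RAW" (the default) and "PERCENTAGE". The following is a hedged variant of the component above, assuming the value is an accuracy already scaled to the 0-1 range.

from kfp.components import create_component_from_func, OutputPath


@create_component_from_func
def show_accuracy_metric(
    accuracy: float,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json

    metrics = {
        "metrics": [
            {
                "name": "accuracy",
                "numberValue": accuracy,   # assumed to be in the 0-1 range
                "format": "PERCENTAGE",    # rendered as a percentage in Run Output
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)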

    Config

    advanced-run-3.png

    In the Config tab, you can view all the values received as pipeline configurations.

diff --git a/en/docs/1.0/kubeflow/basic-component/index.html b/en/docs/1.0/kubeflow/basic-component/index.html

We can write it in Python code like this.

    print(number)

    However, when this code is run, an error occurs and it does not work because the number that should be printed is not defined.

    As we saw in Kubeflow Concepts, values like number that are required in component content are defined in Config. In order to execute component content, the necessary Configs must be passed from the component wrapper.

    Component Wrapper

    Define a standalone Python function

    Now we need to create a component wrapper to be able to pass the required Configs.

    Without a separate Config, it will be like this when wrapped with a component wrapper.

def print_and_return_number():
    print(number)
    return number

Now we add the Config required by the content as an argument to the wrapper. However, you must write not only the argument but also its type hint. When Kubeflow converts the pipeline into the Kubeflow format, it checks whether the declared input and output types match in the connections between components. If the input type a component requires does not match the output type it receives from another component, the pipeline cannot be created.

    Now we complete the component wrapper by writing down the argument, its type and the type to be returned as follows.

def print_and_return_number(number: int) -> int:
    print(number)
    return number

    In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:

    • int
    • float
    • str

If you want to return multiple values instead of a single value, you must use collections.namedtuple.
For more details, please refer to the Kubeflow official documentation.
For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows.

from typing import NamedTuple


def divide_and_return_number(
    number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
    from collections import namedtuple

    quotient, remainder = divmod(number, 2)
    print("quotient is", quotient)
    print("remainder is", remainder)

    divide_outputs = namedtuple(
        "DivideOutputs",
        [
            "quotient",
            "remainder",
        ],
    )
    return divide_outputs(quotient, remainder)

    Convert to Kubeflow Format

    Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through kfp.components.create_component_from_func. This converted form can be imported as a function in Python and used in the pipeline.

from kfp.components import create_component_from_func


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number

    Share component with yaml file

If it is not possible to share the Python code itself, you can share components as a YAML file and use them. To do this, first convert the component to a YAML file and then use it in the pipeline with kfp.components.load_component_from_file.

    First, let's explain the process of converting the written component to a YAML file.

from kfp.components import create_component_from_func


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


if __name__ == "__main__":
    print_and_return_number.component_spec.save("print_and_return_number.yaml")

    If you run the Python code you wrote, a file called print_and_return_number.yaml will be created. When you check the file, it will be as follows.

name: Print and return number
inputs:
- {name: number, type: Integer}
outputs:
- {name: Output, type: Integer}
implementation:
  container:
    image: python:3.7
    command:
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def print_and_return_number(number):
          print(number)
          return number

      def _serialize_int(int_value: int) -> str:
          if isinstance(int_value, str):
              return int_value
          if not isinstance(int_value, int):
              raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
          return str(int_value)

      import argparse
      _parser = argparse.ArgumentParser(prog='Print and return number', description='')
      _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
      _parsed_args = vars(_parser.parse_args())
      _output_files = _parsed_args.pop("_output_paths", [])

      _outputs = print_and_return_number(**_parsed_args)

      _outputs = [_outputs]

      _output_serializers = [
          _serialize_int,

      ]

      import os
      for idx, output_file in enumerate(_output_files):
          try:
              os.makedirs(os.path.dirname(output_file))
          except OSError:
              pass
          with open(output_file, 'w') as f:
              f.write(_output_serializers[idx](_outputs[idx]))
    args:
    - --number
    - {inputValue: number}
    - '----output-paths'
    - {outputPath: Output}

    Now the generated file can be shared and used in the pipeline as follows.

    from kfp.components import load_component_from_file

    print_and_return_number = load_component_from_file("print_and_return_number.yaml")
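For illustration, here is a minimal sketch of wiring the loaded component into a pipeline; apart from load_component_from_file, it mirrors the pipelines built elsewhere in this guide, and the pipeline and output file names are assumptions.

import kfp
from kfp.components import load_component_from_file
from kfp.dsl import pipeline

# The shared YAML file produced earlier.
print_and_return_number = load_component_from_file("print_and_return_number.yaml")


@pipeline(name="shared_component_pipeline")
def shared_component_pipeline(number: int):
    print_and_return_number(number)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(shared_component_pipeline, "shared_component_pipeline.yaml")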

    How Kubeflow executes component

    In Kubeflow, the execution order of components is as follows:

    1. docker pull <image>: Pull the image containing the execution environment information of the defined component.
    2. Run command: Execute the component's content within the pulled image.

    Taking print_and_return_number.yaml as an example, the default image in @create_component_from_func is python:3.7, so the component's content will be executed based on that image.

    1. docker pull python:3.7
    2. print(number)
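If the default python:3.7 image is not sufficient, the execution environment can be changed when converting the function. The sketch below uses the base_image and packages_to_install arguments of create_component_from_func; the specific image tag and package are illustrative assumptions.

from functools import partial

from kfp.components import create_component_from_func


@partial(
    create_component_from_func,
    base_image="python:3.9-slim",     # assumed alternative base image
    packages_to_install=["pandas"],   # installed with pip before the content runs
)
def print_and_return_number(number: int) -> int:
    import pandas as pd

    print(pd.Series([number]))
    return number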

    References:

diff --git a/en/docs/1.0/kubeflow/basic-pipeline-upload/index.html b/en/docs/1.0/kubeflow/basic-pipeline-upload/index.html
    Version: 1.0

    6. Pipeline - Upload

    Upload Pipeline

    Now, let's upload the pipeline we created directly to kubeflow.
    Pipeline uploads can be done through the kubeflow dashboard UI. Use the method used in Install Kubeflow to do port forwarding.

    kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

    Access http://localhost:8080 to open the dashboard.

    1. Click Pipelines Tab

    pipeline-gui-0.png

    2. Click Upload Pipeline

    pipeline-gui-1.png

    3. Click Choose file

    pipeline-gui-2.png

    4. Upload created yaml file

    pipeline-gui-3.png

    5. Create

    pipeline-gui-4.png

    Upload Pipeline Version

    The uploaded pipeline allows you to manage versions through uploads. However, it serves the role of gathering pipelines with the same name rather than version management at the code level, such as Github. -In the example above, clicking on example_pipeline will bring up the following screen.

    pipeline-gui-5.png

If you click it, the following screen appears.

    pipeline-gui-4.png

    If you click Upload Version, a screen appears where you can upload the pipeline.

    pipeline-gui-6.png

    Now, upload your pipeline.

    pipeline-gui-7.png

    Once uploaded, you can check the pipeline version as follows.

    pipeline-gui-8.png
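The upload and version upload can also be done from code with the kfp SDK instead of the dashboard. This is only a sketch; the host address assumes the dashboard is port-forwarded as described above.

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")  # assumed endpoint

# The first upload creates the pipeline...
uploaded = client.upload_pipeline("example_pipeline.yaml", pipeline_name="example_pipeline")

# ...and later uploads can be attached to it as new versions.
client.upload_pipeline_version(
    "example_pipeline.yaml",
    pipeline_version_name="v2",
    pipeline_id=uploaded.id,
)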

diff --git a/en/docs/1.0/kubeflow/basic-pipeline/index.html b/en/docs/1.0/kubeflow/basic-pipeline/index.html

The return value stored in number_1_result can be used through number_1_result.output.

    Multi Output

    In the example above, the components return a single value, so it can be directly used with output.
    However, if there are multiple return values, they will be stored in outputs as a dictionary. You can use the keys to access the desired return values. Let's consider an example with a component that returns multiple values, like the one mentioned in the component definition. The divide_and_return_number component returns quotient and remainder. Here's an example of passing these two values to print_and_return_number:

def multi_pipeline(number: int):
    divided_result = divide_and_return_number(number)
    num_1_result = print_and_return_number(divided_result.outputs["quotient"])
    num_2_result = print_and_return_number(divided_result.outputs["remainder"])

    Store the result of divide_and_return_number in divided_result and you can get the values of each by divided_result.outputs["quotient"] and divided_result.outputs["remainder"].
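Putting the pieces together, here is a self-contained sketch of such a multi-output pipeline. It reuses the divide_and_return_number example from the component page (with the print statements trimmed), and the pipeline and file names are assumptions.

from typing import NamedTuple

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def divide_and_return_number(
    number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
    from collections import namedtuple

    quotient, remainder = divmod(number, 2)
    divide_outputs = namedtuple("DivideOutputs", ["quotient", "remainder"])
    return divide_outputs(quotient, remainder)


@pipeline(name="multi_pipeline")
def multi_pipeline(number: int):
    divided_result = divide_and_return_number(number)
    print_and_return_number(divided_result.outputs["quotient"])
    print_and_return_number(divided_result.outputs["remainder"])


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(multi_pipeline, "multi_pipeline.yaml")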

Write the Python code

    Now, let's get back to the main topic and pass the result of these two values to sum_and_print_numbers.

def example_pipeline():
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

Next, gather the Configs required by each component and define them as pipeline Configs.

def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

    Convert to Kubeflow Format

    Finally, convert it into a format that can be used in Kubeflow. The conversion can be done using the kfp.dsl.pipeline function.

from kfp.dsl import pipeline


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

To run a pipeline in Kubeflow, it must be compiled into the designated YAML format, since only YAML can be uploaded. Compilation can be done with the following code.

if __name__ == "__main__":
    import kfp

    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    Conclusion

    As explained earlier, if we gather the content into a Python code, it will look like this.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    The compiled result is as follows.

    example_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: example-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
    "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
    spec:
    entrypoint: example-pipeline
    templates:
    - name: example-pipeline
    inputs:
    parameters:
    - {name: number_1}
    - {name: number_2}
    dag:
    tasks:
    - name: print-and-return-number
    template: print-and-return-number
    arguments:
    parameters:
    - {name: number_1, value: '{{inputs.parameters.number_1}}'}
    - name: print-and-return-number-2
    template: print-and-return-number-2
    arguments:
    parameters:
    - {name: number_2, value: '{{inputs.parameters.number_2}}'}
    - name: sum-and-print-numbers
    template: sum-and-print-numbers
    dependencies: [print-and-return-number, print-and-return-number-2]
    arguments:
    parameters:
    - {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
    - {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
    - name: print-and-return-number
    container:
    args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_1}
    outputs:
    parameters:
    - name: print-and-return-number-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_1}}"}'}
    - name: print-and-return-number-2
    container:
    args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_2}
    outputs:
    parameters:
    - name: print-and-return-number-2-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_2}}"}'}
    - name: sum-and-print-numbers
    container:
    args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
    '{{inputs.parameters.print-and-return-number-2-Output}}']
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def sum_and_print_numbers(number_1, number_2):
    print(number_1 + number_2)

    import argparse
    _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
    _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = sum_and_print_numbers(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: print-and-return-number-2-Output}
    - {name: print-and-return-number-Output}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
    "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
    argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
    description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
    type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
    dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
    {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
    pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
    "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
    arguments:
    parameters:
    - {name: number_1}
    - {name: number_2}
    serviceAccountName: pipeline-runner
Compilation can be done using the following command.

if __name__ == "__main__":
    import kfp
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    Conclusion

As explained earlier, if we gather everything into a single Python file, it looks like this.

    import kfp
    from kfp.components import create_component_from_func
    from kfp.dsl import pipeline

@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number

@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)

@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    The compiled result is as follows.

    example_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: example-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
    "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
    spec:
    entrypoint: example-pipeline
    templates:
    - name: example-pipeline
    inputs:
    parameters:
    - {name: number_1}
    - {name: number_2}
    dag:
    tasks:
    - name: print-and-return-number
    template: print-and-return-number
    arguments:
    parameters:
    - {name: number_1, value: '{{inputs.parameters.number_1}}'}
    - name: print-and-return-number-2
    template: print-and-return-number-2
    arguments:
    parameters:
    - {name: number_2, value: '{{inputs.parameters.number_2}}'}
    - name: sum-and-print-numbers
    template: sum-and-print-numbers
    dependencies: [print-and-return-number, print-and-return-number-2]
    arguments:
    parameters:
    - {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
    - {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
    - name: print-and-return-number
    container:
    args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_1}
    outputs:
    parameters:
    - name: print-and-return-number-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_1}}"}'}
    - name: print-and-return-number-2
    container:
    args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_2}
    outputs:
    parameters:
    - name: print-and-return-number-2-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_2}}"}'}
    - name: sum-and-print-numbers
    container:
    args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
    '{{inputs.parameters.print-and-return-number-2-Output}}']
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def sum_and_print_numbers(number_1, number_2):
    print(number_1 + number_2)

    import argparse
    _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
    _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = sum_and_print_numbers(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: print-and-return-number-2-Output}
    - {name: print-and-return-number-Output}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
    "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
    argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
    description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
    type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
    dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
    {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
    pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
    "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
    arguments:
    parameters:
    - {name: number_1}
    - {name: number_2}
    serviceAccountName: pipeline-runner
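Besides uploading the compiled YAML through the web UI, the same package can be registered programmatically with the KFP SDK. The snippet below is a minimal sketch: the host URL is a placeholder, and in a multi-user Kubeflow installation you would also need to pass valid authentication (for example a session cookie), which is not shown here.

import kfp

# Hypothetical endpoint; replace with your own Kubeflow Pipelines host.
client = kfp.Client(host="http://localhost:8080/pipeline")

# Register the compiled package under a human-readable name.
client.upload_pipeline(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_name="example_pipeline",
)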
    Version: 1.0

    3. Install Requirements

    The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to Appendix 1. Python Virtual Environment and install the packages on the client node.

    The packages and versions required for the practice are as follows:

    • requirements.txt

      kfp==1.8.9
      scikit-learn==1.0.1
      mlflow==1.21.0
      pandas==1.3.4
      dill==0.3.4

    Activate the Python virtual environment created in the previous section.

    pyenv activate demo

Install the packages.

    pip3 install -U pip
    pip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4
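As a quick sanity check (not part of the original instructions), you can confirm that the pinned versions were actually installed into the active virtual environment with a short Python snippet.

# check_versions.py -- verify that the pinned packages are importable
import kfp
import sklearn
import mlflow
import pandas
import dill

for name, module in [
    ("kfp", kfp),
    ("scikit-learn", sklearn),
    ("mlflow", mlflow),
    ("pandas", pandas),
    ("dill", dill),
]:
    print(name, module.__version__)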
    Version: 1.0

    7. Pipeline - Run

    Run Pipeline

    Now we will run the uploaded pipeline.

    Before Run

    1. Create Experiment

    Experiments in Kubeflow are units that logically manage runs executed within them.

    When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to Run Pipeline.

    Experiments can be created via the Create Experiment button.

    run-0.png

2. Enter a Name

    run-1.png

    Run Pipeline

    1. Select Create Run

    run-2.png

    2. Select Experiment

    run-9.png

    run-10.png

    3. Enter Pipeline Config

    Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for number_1 and number_2.

    run-3.png

    4. Start

    Click the Start button after entering the values. The pipeline will start running.

    run-4.png

    Run Result

The executed pipelines can be viewed in the Runs tab. Clicking on a run provides detailed information related to the executed pipeline.

    run-5.png

    Upon clicking, the following screen appears. Components that have not yet executed are displayed in gray.

    run-6.png

    When a component has completed execution, it is marked with a green checkmark.

    run-7.png

If we look at the last component, we can see that it printed the sum of the input values, which in this case is 8 (the sum of 3 and 5).

    run-8.png
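The same run can also be launched from Python instead of the UI. The sketch below assumes the compiled example_pipeline.yaml from the previous section and a reachable Kubeflow Pipelines endpoint; the host value, run name, and experiment name are placeholders, and a multi-user setup would additionally require authentication.

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")  # hypothetical endpoint

# Submit the compiled package with the same config values used in the UI.
run = client.create_run_from_pipeline_package(
    "example_pipeline.yaml",
    arguments={"number_1": 3, "number_2": 5},
    run_name="example-pipeline-from-sdk",
    experiment_name="demo-experiment",  # assumed experiment name
)
print(run.run_id)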

Let's investigate what might be the problem.

    First, click on the component and go to the Input/Output tab to download the input data.
    You can click on the link indicated by the red square to download the data.

    debug-5.png

    Download both files to the same location. Then navigate to the specified path and check the downloaded files.

    ls

    There are two files as follows.

    drop-na-from-csv-output.tgz load-iris-data-target.tgz

Let's extract them.

    tar -xzvf load-iris-data-target.tgz ; mv data target.csv
    tar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv

Then run the component code in a Jupyter notebook.

debug-3.png
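A minimal sketch of what that notebook check might look like, assuming the two files extracted above (data.csv and target.csv) are in the working directory:

import pandas as pd

data = pd.read_csv("data.csv")
target = pd.read_csv("target.csv")

print(data.shape)                         # original shape of the input data
print(data.dropna().shape)                # dropping by rows removes every row
print(data.dropna(axis="columns").shape)  # dropping by columns keeps the rows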

Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed. Now that we know the cause of the problem, we can modify the component to drop based on columns.

@partial(
    create_component_from_func,
    packages_to_install=["pandas"],
)
def drop_na_from_csv(
    data_path: InputPath("csv"),
    output_path: OutputPath("csv"),
):
    import pandas as pd

    data = pd.read_csv(data_path)
    data = data.dropna(axis="columns")
    data.to_csv(output_path, index=False)

    After modifying, upload the pipeline again and run it to confirm that it is running normally as follows.

    debug-6.png

Next, Python Code w/ Config is where the given Config is used to actually perform the training.
    Finally, there is a process to save the artifacts.

    Component Wrapper

A component wrapper delivers the necessary config to the component contents and executes them.

    concept-3.png

    In Kubeflow, component wrappers are defined as functions, similar to the train_svc_from_csv example above. When a component wrapper wraps the contents, it looks like the following:

    concept-4.png
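For illustration only, a much simpler wrapper built the same way (a made-up double_number function, not the train_svc_from_csv component from this guide) might look like the following:

from kfp.components import create_component_from_func

def double_number(number: int) -> int:
    # component contents: receive the config (number) and do the work
    return number * 2

# the wrapper turns the plain Python function into a Kubeflow component
double_number_op = create_component_from_func(double_number)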

    Artifacts

In the explanation above, it was mentioned that the component creates Artifacts. Artifact is a term for any file that a component generates, such as evaluation results or logs. Of these, the ones we are most interested in are models, data, and metrics.

    concept-5.png

    • Model
    • Data
    • Metric
    • etc

    Model

    We defined the model as follows:

    A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.

    Data

    Data includes preprocessed features, model predictions, etc.

    Metric

    Metric is divided into two categories: dynamic metrics and static metrics.

    • Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch.
    • Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.
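For reference, static metrics like these can be exported from a Kubeflow component by writing an mlpipeline-metrics JSON artifact. The snippet below is a minimal sketch following the KFP v1 convention; the component name and the accuracy value are arbitrary examples, not part of this guide.

from kfp.components import create_component_from_func, OutputPath

@create_component_from_func
def export_accuracy(mlpipeline_metrics_path: OutputPath("Metrics")):
    import json

    # One static metric, written in the format the Kubeflow Pipelines UI expects.
    metrics = {
        "metrics": [
            {"name": "accuracy", "numberValue": 0.95, "format": "PERCENTAGE"}
        ]
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)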

    Pipeline

    A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements.

    concept-6.png

    Pipeline Config

    As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline.

    concept-7.png

    Run

    To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."

    concept-8.png

    When a pipeline is executed, each component generates artifacts. Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored.

    concept-9.png

    Now, let's learn how to write components and pipelines.

    Version: 1.0

    1. Kubeflow Introduction

    To use Kubeflow, you need to write components and pipelines.

    The approach described in MLOps for ALL differs slightly from the method described on the Kubeflow Pipeline official website. Here, Kubeflow Pipeline is used as one of the components in the elements that make up MLOps rather than a standalone workflow.

    Now, let's understand what components and pipelines are and how to write them.

To address this problem, docker provides the -m option which allows you to limit the usage of CPU and memory when running the docker container.

    docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600
    docker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600

    After running the Docker above, you can check the usage through the 'docker stats' command.

    CONTAINER ID   NAME        CPU %     MEM USAGE / LIMIT   MEM %     NET I/O       BLOCK I/O   PIDS
    4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1
    4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1

    In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique.
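The same limits can also be set from Python via the Docker SDK (docker-py). This is a sketch under the assumption that the docker package is installed; it is not something the guide itself requires.

import docker

client = docker.from_env()

# Equivalent of: docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600
container = client.containers.run(
    "ubuntu",
    "sleep 3600",
    name="512-limit-sdk",  # hypothetical name to avoid clashing with the CLI example
    detach=True,
    mem_limit="512m",
    mem_reservation="256m",
)
print(container.name, container.status)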

    docker run with restart policy

    If there is a need to keep a particular container running continuously, the --restart=always option is provided to try to re-create the container immediately after it is terminated.

    After entering the option, run the docker.

    docker run --restart=always ubuntu

    Run watch -n1 docker ps to check if it is restarting. If it is running normally, Restarting (0) will be printed in STATUS.

    CONTAINER ID   IMAGE     COMMAND   CREATED          STATUS                         PORTS     NAMES
    a911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan

    When specifying the restart option for a job resource in Kubernetes, this approach is used.

    Running docker run as a background process

    By default, when running a Docker container, it is executed as a foreground process. This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands.

    Let's try an example. Open two terminals, and in one terminal, continuously monitor docker ps, while in the other terminal, execute the following commands one by one and observe the behavior.

    First Practice

    docker run -it ubuntu sleep 10

The terminal remains blocked for 10 seconds, and you cannot run any other commands from it. After 10 seconds, you can check with docker ps that the container has terminated.

    Second Practice

    docker run -it ubuntu sleep 10

    After that, press ctrl + p -> ctrl + q.

    Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with docker ps. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the run command.

    Third Practice

    docker run -d ubuntu sleep 10

    In detached mode, you can perform other actions in the terminal that executed the command.

    It is good to use detached mode appropriately according to the situation.
For example, when developing a backend API server that communicates with a DB, the API server needs to be watched continuously with hot reloading while the source code changes, but the DB does not need to be monitored, so it can be run as follows:
Run the DB container in detached mode, and run the backend API server in attached mode to follow the logs.

    References

Even when stopped, the data used in the container remains inside it, so a stopped container can be started again by restarting it. However, it still consumes disk space, so containers that are no longer used should be deleted with the docker rm command.

    First, let's check the current containers.

    docker ps -a

    There are three containers as follows.

    CONTAINER ID   IMAGE          COMMAND                  CREATED          STATUS                            PORTS     NAMES
    730391669c39 busybox "sh -c 'while true; …" 4 minutes ago Exited (137) About a minute ago demo3
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1

    Let's try to delete the 'demo3' container through the following command.

    docker rm demo3

Running docker ps -a again now shows only two containers.

    CONTAINER ID   IMAGE          COMMAND        CREATED          STATUS                       PORTS     NAMES
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1

    Delete the remaining containers as well.

    docker rm demo2
    docker rm demo1
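If many stopped containers pile up, the same cleanup can be scripted. A sketch using the Docker SDK (docker-py), assuming it is installed:

import docker

client = docker.from_env()

# Remove every container whose status is "exited", like running docker rm on each of them.
for container in client.containers.list(all=True, filters={"status": "exited"}):
    print("removing", container.name)
    container.remove()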

    10. Docker rmi

    Command to delete a Docker image.

    docker rmi --help

Use the following command to check which images are currently on the local machine.

    docker images

    The following is output.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    busybox latest a8440bba1bc0 32 hours ago 1.41MB
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

    I will try to delete the busybox image.

    docker rmi busybox

    If you type docker images again, the following will appear.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

    References

    Version: 1.0

    [Practice] Docker images

• docker commit
  • A way to create a Docker image from a running container
  • docker commit -m "message" -a "author" <container-id> <image-name>
  • With docker commit, you can create a Docker image without writing a Dockerfile by hand.

1. Move to the docker-practice folder.

2. Create an empty file called Dockerfile.

   touch Dockerfile

3. Which Dockerfile instruction installs a specific package into the image?

Answer: RUN

Let's look at the basic instructions that can be used in a Dockerfile one by one. FROM specifies which image to use as the base image for the Dockerfile. When creating a Docker image, instead of building the intended environment from scratch, you can use a pre-built image such as python:3.9 or python:3.9-alpine as the base, install pytorch on top of it, and add your own source code.

    FROM <image>[:<tag>] [AS <name>]

# Example
    FROM ubuntu
    FROM ubuntu:18.04
    FROM nginx:latest AS ngx

    The command to copy files or directories from the <src> path on the host (local) to the <dest> path inside the container.

    COPY <src>... <dest>

# Example
    COPY a.txt /some-directory/b.txt
    COPY my-directory /some-directory-2

    ADD is similar to COPY but it has additional features.

# 1 - Extract a compressed file on the host while copying it into the container
ADD scripts.tar.gz /tmp
# 2 - Use a file at a remote URL as the source path
ADD http://www.example.com/script.sh /tmp

# Using ADD instead of COPY is recommended only when you need one of the two features above

The command to execute a specified command inside the Docker container during the image build. Docker images keep the state resulting from the executed commands.

    RUN <command>
    RUN ["executable-command", "parameter1", "parameter2"]

# Example
    RUN pip install torch
    RUN pip install -r requirements.txt

    CMD specifies a command that the Docker container will run when it starts. There is a similar command called ENTRYPOINT. The difference between them will be discussed later. Note that only one CMD can be run in one Docker image, which is different from RUN command.

    CMD <command>
    CMD ["executable-command", "parameter1", "parameter2"]
    CMD ["parameter1", "parameter2"] # ENTRYPOINT 와 함께 사용될 때

    # 예시
    CMD python main.py

    WORKDIR is a command that specifies which directory inside the container to perform future additional commands. If the directory does not exist, it will be created.

    WORKDIR /path/to/workdir

# Example
WORKDIR /home/demo
RUN pwd # prints /home/demo

    This is a command to set the value of environment variables that will be used continuously inside the container.

    ENV <KEY> <VALUE>
    ENV <KEY>=<VALUE>

# Example
# set the default locale
    RUN locale-gen ko_KR.UTF-8
    ENV LANG ko_KR.UTF-8
    ENV LANGUAGE ko_KR.UTF-8
    ENV LC_ALL ko_KR.UTF-8

    You can specify the port/protocol to be opened from the container. If <protocol> is not specified, TCP is set as the default.

    EXPOSE <port>
    EXPOSE <port>/<protocol>

# Example
    EXPOSE 8080

    Write a simple Dockerfile by using vim Dockerfile or an editor like vscode and write the following:

# Set the base image to ubuntu 18.04.
FROM ubuntu:18.04

# Run the apt-get update command.
RUN apt-get update

# Set the value of the TEST env var to hello.
ENV TEST hello

# When the Docker container starts, print the value of the TEST environment variable.
CMD echo $TEST

    Use the docker build command to create a Docker Image from a Dockerfile.

    docker build --help

    Run the following command from the path where the Dockerfile is located.

    docker build -t my-image:v1.0.0 .

    The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. Let's check if the image was built successfully.

# grep: filter the output to check whether my-image exists
    docker images | grep my-image

    If performed normally, it will output as follows.

    my-image     v1.0.0    143114710b2d   3 seconds ago   87.9MB

    Let's now run a docker container with the my-image:v1.0.0 image that we just built.

    docker run my-image:v1.0.0

    If performed normally, it will result in the following.

    hello

    Let's run a docker container and change the value of the TEST env var at the time of running the my-image:v1.0.0 image we just built.

    docker run -e TEST=bye my-image:v1.0.0

    If performed normally, it will be as follows.

    bye
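For completeness, the same override can be done from Python with the Docker SDK (docker-py). This is a sketch that assumes the docker package is installed and the my-image:v1.0.0 image built above exists locally.

import docker

client = docker.from_env()

# Equivalent of: docker run -e TEST=bye my-image:v1.0.0
output = client.containers.run(
    "my-image:v1.0.0",
    environment={"TEST": "bye"},
    remove=True,
)
print(output.decode().strip())  # prints "bye"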
    Version: 1.0

    What is Docker?

    Container

    • Containerization:
      • A technology that allows applications to be executed uniformly anywhere.
    • Container Image:
      • A collection of all the files required to run an application.
      • → Similar to a mold for making fish-shaped bread (Bungeoppang).
    • Container:
      • A single process that is executed based on a container image.
      • → A fish-shaped bread (Bungeoppang) produced using a mold.

    Docker

    Docker is a platform that allows you to manage and use containers.
    Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.

In Docker, a container's resources are isolated and its lifecycle is controlled through Linux kernel features such as cgroups.
    However, it is too difficult to use these interfaces directly, so an abstraction layer is created.

    docker-layer.png

    Through this, users can easily control containers with just the user-friendly API Docker CLI.

    • Users can easily control containers using the user-friendly API called Docker CLI.

    Interpretation of Layer

    The roles of the layers mentioned above are as follows:

    1. runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process.
    2. containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI).
    3. dockerd: Solely responsible for issuing commands to containerd.
    4. Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.
      • During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.

    Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.
In the upcoming text, the term "Docker" may be used in various contexts.

    For ML Engineer

    ML engineers use Docker for the following reasons:

    1. ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python packages.
    2. Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. Containerization technology enables this.
    3. Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.
    Version: 1.0

    Install Docker

    Docker

    To practice Docker, you need to install Docker.
    The Docker installation varies depending on which OS you are using.
Please refer to the official website for the Docker installation that fits your environment:

    Check Installation

To check the installation, you need an OS and terminal environment in which docker run hello-world runs correctly.

OS        Docker Engine    Terminal
MacOS     Docker Desktop   zsh
Windows   Docker Desktop   Powershell
Windows   Docker Desktop   WSL2
Ubuntu    Docker Engine    bash
    Before diving in..

Because the goal is to explain only the Docker usage needed for MLOps, many of the metaphors and examples are focused on MLOps.

However, the initial Kubernetes included Docker Engine for container virtualization.
    Therefore, whenever the Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected.

    Open Container Initiative

    In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.
    Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of Containerd.

    In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5.

    CRI-O

    CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes.

    Current docker & kubernetes

Kubernetes has so far used Docker Engine as its default container runtime, but because Docker's API does not match the CRI specification (Docker follows OCI), Kubernetes developed and maintained dockershim to make Docker's API compatible with CRI, which was a large burden for Kubernetes rather than for Docker. dockershim was deprecated in Kubernetes v1.20 and removed in v1.23.

    • v1.23 will be released in December 2021

So from Kubernetes v1.23, you can no longer use Docker natively. However, users are not much affected by this change because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of which container runtime Kubernetes uses.

    References

virtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created

    Wait until one pod is Running.

We will install the Profile Controller.

    kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -

    If performed normally, it will be outputted as follows.

    customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created
    serviceaccount/profiles-controller-service-account created
    role.rbac.authorization.k8s.io/profiles-leader-election-role created
    rolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created
    configmap/namespace-labels-data-48h7kd55mc created
    configmap/profiles-config-46c7tgh6fd created
    service/profiles-kfam created
    deployment.apps/profiles-deployment created
    virtualservice.networking.istio.io/profiles-kfam created

    Check to see if it is installed normally.

    kubectl get po -n kubeflow | grep profiles-deployment

    Wait until one pod is running.

    profiles-deployment-89f7d88b-qsnrd                       2/2     Running   0          42s

    Install the Volumes Web App.

    kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -

    If performed normally, it will be output as follows.

    serviceaccount/volumes-web-app-service-account created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created
    clusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created
    configmap/volumes-web-app-parameters-4gg8cm2gmk created
    service/volumes-web-app-service created
    deployment.apps/volumes-web-app-deployment created
    virtualservice.networking.istio.io/volumes-web-app-volumes-web-app created

    Check if it is installed normally.

    kubectl get po -n kubeflow | grep volumes-web-app

    Wait until one pod is running.

    volumes-web-app-deployment-8589d664cc-62svl              1/1     Running   0          27s
Install the Tensorboard Web App.

If performed normally, the following resources are created.

serviceaccount/tensorboards-web-app-service-account created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created
clusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created
configmap/tensorboards-web-app-parameters-g28fbd6cch created
service/tensorboards-web-app-service created
deployment.apps/tensorboards-web-app-deployment created
virtualservice.networking.istio.io/t

Check if it is installed correctly and wait until the tensorboard-web-app-deployment-6ff79b7f44-qbzmw pod is Running.

Install the Tensorboard Controller. If performed normally, a custom resource definition for tensorboards.tensorboard.kubeflow.org is created, along with a service account, roles, role bindings, a config map, the controller manager metrics service, and deployment.apps/tensorboard-controller-controller-manager. Check that deployment.apps/tensorboard-controller-controller-manager was installed correctly and wait until one pod is Running.

Install the Training Operator.

    kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -

    If performed normally, it will be output as follows.

    customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created
    serviceaccount/training-operator created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-view created
    clusterrole.rbac.authorization.k8s.io/training-operator created
    clusterrolebinding.rbac.authorization.k8s.io/training-operator created
    service/training-operator created
    deployment.apps/training-operator created

    Check to see if it has been installed normally.

    kubectl get po -n kubeflow | grep training-operator

    Wait until one pod is up and running.

    training-operator-7d98f9dd88-6887f                          1/1     Running   0          28s
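
To illustrate what the Training Operator manages, below is a minimal sketch of a TFJob that runs a single toy worker. The namespace, image, and command are placeholders (the namespace must already exist, for example the profile namespace created in the User Namespace step below), and this sketch is not part of the installation itself.

kubectl apply -f - <<EOF
apiVersion: kubeflow.org/v1
kind: TFJob
metadata:
  name: tfjob-example
  namespace: kubeflow-user-example-com
spec:
  tfReplicaSpecs:
    Worker:
      replicas: 1
      restartPolicy: OnFailure
      template:
        spec:
          containers:
            # The Training Operator expects the container to be named "tensorflow".
            - name: tensorflow
              image: tensorflow/tensorflow:2.7.0
              command: ["python", "-c", "print('one training step')"]
EOF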

    User Namespace

To use Kubeflow, create a Kubeflow Profile for the user account that will be used.

    kustomize build common/user-namespace/base | kubectl apply -f -

If performed normally, it will be output as follows.

    configmap/default-install-config-9h2h2b6hbk created
    profile.kubeflow.org/kubeflow-user-example-com created

    Confirm that the kubeflow-user-example-com profile has been created.

    kubectl get profile
    kubeflow-user-example-com   37s
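
If you later need an additional user, a Profile can also be created directly from a manifest. Below is a minimal sketch with a placeholder profile name and e-mail; note that logging in as that user additionally requires registering credentials with the authentication layer (Dex), which is not covered here.

kubectl apply -f - <<EOF
apiVersion: kubeflow.org/v1
kind: Profile
metadata:
  # The profile name becomes the user's namespace.
  name: another-user-example-com
spec:
  owner:
    kind: User
    # E-mail address the user will log in with (placeholder).
    name: another-user@example.com
EOF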

    Check installation

Confirm that the installation succeeded by port-forwarding and accessing the Kubeflow central dashboard in a web browser.

    kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open a web browser and connect to http://localhost:8080 to confirm that the following screen is displayed.

login-ui

    Enter the following connection information to connect.

    • Email Address: user@example.com
    • Password: 12341234

    central-dashboard


However, in order to separate storage for Kubeflow and MLflow purposes, we will create an mlflow-specific bucket.
    First, port-forward the minio-service to access Minio and create the bucket.

    kubectl port-forward svc/minio-service -n kubeflow 9000:9000

    Open a web browser and connect to localhost:9000 to display the following screen.

    minio-install

    Enter the following credentials to log in:

    • Username: minio
    • Password: minio123

Click the + button at the bottom right, then click Create Bucket.

    create-bucket

    Enter mlflow in Bucket Name to create the bucket.

If successfully created, you will see a bucket named mlflow on the left.

mlflow-bucket
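
If you prefer a command line over the web console, the same bucket can also be created with the MinIO client (mc). This is a minimal sketch that assumes the port-forward above is still running and uses the default minio/minio123 credentials; mc is not part of this installation and must be installed separately.

# Register the port-forwarded MinIO endpoint under an alias.
mc alias set kubeflow-minio http://localhost:9000 minio minio123

# Create the mlflow bucket and list the buckets to confirm.
mc mb kubeflow-minio/mlflow
mc ls kubeflow-minio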


    Let's Install MLflow Tracking Server

    Add Helm Repository

    helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts

    If the following message is displayed, it means it has been added successfully.

    "mlops-for-all" has been added to your repositories

    Update Helm Repository

    helm repo update

    If the following message is displayed, it means that the update has been successfully completed.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "mlops-for-all" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

    Install mlflow-server Helm Chart version 0.2.0.

    helm install mlflow-server mlops-for-all/mlflow-server \
    --namespace mlflow-system \
    --version 0.2.0
• The above Helm chart installs MLflow with its backend store connected to the PostgreSQL DB installed above and its artifact store connected to the default minio created during the Kubeflow installation.
  • If you want to use a separate DB or object storage, refer to the Helm Chart Repo and override the corresponding values during helm install (a short sketch appears below).

    The following message should be displayed:

    NAME: mlflow-server
    LAST DEPLOYED: Sat Dec 18 22:02:13 2021
    NAMESPACE: mlflow-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

    Check to see if it was installed normally.

    kubectl get pod -n mlflow-system | grep mlflow-server

    Wait until one mlflow-server related pod is running in the mlflow-system namespace.
If it is output similar to the following, then it has been successfully executed.

    mlflow-server-ffd66d858-6hm62        1/1     Running   0          74s
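
If you need to point the chart at your own database or object storage, one way is to dump the chart's default values and install with an edited copy. This is a minimal sketch; the file name is a placeholder, and the exact value keys to change should be taken from the chart's values.yaml.

# Write the chart's configurable values to a local file.
helm show values mlops-for-all/mlflow-server --version 0.2.0 > mlflow-values.yaml

# Edit mlflow-values.yaml (backend store / artifact store settings),
# then install with the overridden values.
helm install mlflow-server mlops-for-all/mlflow-server \
--namespace mlflow-system \
--version 0.2.0 \
-f mlflow-values.yaml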

    Check installation

    Let's now check if we can successfully connect to the MLflow Server.

    First, we will perform port forwarding in order to connect from the client node.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

    Open a web browser and connect to localhost:5000 and the following screen will be output.

    mlflow-install


    Version: 1.0

    4. Prometheus & Grafana

    Prometheus & Grafana

    Prometheus and Grafana are tools for monitoring.
    For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.
Among the many tools that can perform such monitoring efficiently, MLOps for ALL uses the open source Prometheus and Grafana.

    For more information, please refer to the Prometheus Official Documentation and Grafana Official Documentation.

Prometheus is a tool that collects metrics from various targets, and Grafana is a tool that helps visualize the collected data. Although there is no dependency between them, they are often used together, complementing each other.

    In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully.

We also install a dashboard for efficiently monitoring the metrics of SeldonDeployments created with Seldon-Core, using version 1.12.0 of the Helm Chart from the seldonio/seldon-core-analytics Helm Repository.
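
For reference, once the chart below is installed, the collected metrics can also be queried directly through the Prometheus HTTP API. The following is a minimal sketch; the exact service name and port are defined by the chart, so check them first and adjust the port-forward accordingly.

# Find the Prometheus server service created by the chart (the exact name may differ).
kubectl get svc -n seldon-system | grep prometheus

# After port-forwarding that service to localhost:9090, ask Prometheus which scrape
# targets are up (a value of 1 means the target is up).
curl -s 'http://localhost:9090/api/v1/query?query=up'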

    Add Helm Repository

    helm repo add seldonio https://storage.googleapis.com/seldon-charts

    If the following message is output, it means that it has been added successfully.

    "seldonio" has been added to your repositories

    Update Helm Repository

    helm repo update

    If the following message is displayed, it means that the update was successful.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "seldonio" chart repository
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

    Install version 1.12.0 of the seldon-core-analytics Helm Chart.

    helm install seldon-core-analytics seldonio/seldon-core-analytics \
    --namespace seldon-system \
    --version 1.12.0

    The following message should be output.

    Skip...
    NAME: seldon-core-analytics
    LAST DEPLOYED: Tue Dec 14 18:29:38 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1

    Check to see if it was installed normally.

    kubectl get pod -n seldon-system | grep seldon-core-analytics

    Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace.

seldon-core-analytics-grafana-657c956c88-ng8wn                  2/2     Running   0          114s
seldon-core-analytics-kube-state-metrics-94bb6cb9-svs82         1/1     Running   0          114s
seldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8   2/2     Running   0          114s
seldon-core-analytics-prometheus-node-exporter-5rrj5            1/1     Running   0          114s
seldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6   1/1     Running   0          114s
seldon-core-analytics-prometheus-seldon-685c664894-7cr45        2/2     Running   0          114s

    Check installation

Let's now check if we can connect to Grafana normally. First, port-forward the service so that it can be reached from the client node.

    kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

    Open the web browser and connect to localhost:8090, then the following screen will be displayed.

    grafana-install

    Enter the following connection information to connect.

    • Email or username: admin
    • Password: password

    When you log in, the following screen will be displayed.

    grafana-login

    Click the dashboard icon on the left and click the Manage button.

    dashboard-click

    You can see that the basic Grafana dashboard is included. Click the Prediction Analytics dashboard among them.

    dashboard

    The Seldon Core API Dashboard is visible and can be confirmed with the following output.

    seldon-dashboard
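
The panels only fill up once a SeldonDeployment receives traffic. The following is a minimal sketch of a prediction request sent through the Ambassador gateway installed earlier; the deployment name (sklearn-example), namespace (seldon), and payload shape are placeholders for whatever model you actually have deployed.

# Forward the Ambassador gateway locally (run in a separate terminal).
kubectl port-forward svc/ambassador -n seldon-system 8081:80

# Send a prediction request; Seldon's REST path is
# /seldon/<namespace>/<deployment-name>/api/v1.0/predictions.
curl -s -X POST http://localhost:8081/seldon/seldon/sklearn-example/api/v1.0/predictions \
  -H "Content-Type: application/json" \
  -d '{"data": {"ndarray": [[1.0, 2.0, 3.0, 4.0]]}}'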

    References

    Version: 1.0

    3. Seldon-Core

    Seldon-Core

Seldon-Core is one of the open source frameworks for deploying and managing numerous machine learning models in Kubernetes environments.
For more details, please refer to Seldon-Core's official product description page and GitHub, as well as the API Deployment part of this guide.
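
To give a concrete sense of what Seldon-Core manages, below is a minimal sketch of a SeldonDeployment that serves a scikit-learn model with the pre-packaged SKLEARN_SERVER. The namespace, name, and model URI are illustrative placeholders and are not part of this installation; applying it only makes sense after Seldon-Core and the gateway below have been installed.

# The target namespace must exist first (placeholder name).
kubectl create namespace seldon

kubectl apply -f - <<EOF
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: sklearn-example
  namespace: seldon
spec:
  predictors:
    - name: default
      replicas: 1
      graph:
        name: classifier
        implementation: SKLEARN_SERVER
        # Publicly hosted example model (placeholder; point this at your own model).
        modelUri: gs://seldon-models/sklearn/iris
EOF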

    Installing Seldon-Core

To use Seldon-Core, an ingress module for Kubernetes such as Ambassador or Istio is required.
Seldon-Core officially supports only Ambassador and Istio; MLOps for ALL uses Ambassador with Seldon-Core, so we will install Ambassador.

    Adding Ambassador to the Helm Repository

    helm repo add datawire https://www.getambassador.io

    If the following message is displayed, it means it has been added normally.

    "datawire" has been added to your repositories

    Update Ambassador - Helm Repository

    helm repo update

    If the following message is output, it means that the update has been completed normally.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Ambassador - Helm Install

    Install version 6.9.3 of the Ambassador Chart.

    helm install ambassador datawire/ambassador \
    --namespace seldon-system \
    --create-namespace \
    --set image.repository=quay.io/datawire/ambassador \
    --set enableAES=false \
    --set crds.keep=false \
    --version 6.9.3

    The following message should be displayed.

Skip...

    W1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
    W1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding
    NAME: ambassador
    LAST DEPLOYED: Mon Dec 6 17:01:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    NOTES:
    -------------------------------------------------------------------------------
    Congratulations! You've successfully installed Ambassador!

    -------------------------------------------------------------------------------
    To get the IP address of Ambassador, run the following commands:
    NOTE: It may take a few minutes for the LoadBalancer IP to be available.
    You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'

    On GKE/Azure:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')

    On AWS:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')

    echo http://$SERVICE_IP:

    For help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.

Wait until four pods are Running in the seldon-system namespace.

    kubectl get pod -n seldon-system
ambassador-7f596c8b57-4s9xh                  1/1     Running   0          7m15s
ambassador-7f596c8b57-dt6lr                  1/1     Running   0          7m15s
ambassador-7f596c8b57-h5l6f                  1/1     Running   0          7m15s
ambassador-agent-77bccdfcd5-d5jxj            1/1     Running   0          7m15s

    Seldon-Core - Helm Install

    Install version 1.11.2 of the seldon-core-operator Chart.

    helm install seldon-core seldon-core-operator \
    --repo https://storage.googleapis.com/seldon-charts \
    --namespace seldon-system \
    --set usageMetrics.enabled=true \
    --set ambassador.enabled=true \
    --version 1.11.2

    The following message should be displayed.

    Skip...

    W1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration
    NAME: seldon-core
    LAST DEPLOYED: Mon Dec 6 17:05:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

    Wait until one seldon-controller-manager pod is Running in the seldon-system namespace.

    kubectl get pod -n seldon-system | grep seldon-controller
    seldon-controller-manager-8457b8b5c7-r2frm   1/1     Running   0          2m22s

    References

    Version: 1.0

    5. Install Kubernetes Modules

    Setup Kubernetes Modules

    On this page, we will explain how to install the modules that will be used on the cluster from the client nodes.
All the processes introduced here will be done on the client nodes.

    Helm

    Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once.

    1. Download Helm version 3.7.1 into the current folder.
    • For Linux amd64

      wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz
    • Other OS refer to the official website for the download path of the binary that matches the OS and CPU of your client node.

2. Unzip the file to use helm and move the file to its desired location.

      tar -zxvf helm-v3.7.1-linux-amd64.tar.gz
      sudo mv linux-amd64/helm /usr/local/bin/helm
3. Check to see if the installation was successful:

      helm help

      If you see the following message, it means that it has been installed normally.

      The Kubernetes package manager

      Common actions for Helm:
    • helm search: search for charts

    • helm pull: download a chart to your local directory to view

    • helm install: upload the chart to Kubernetes

    • helm list: list releases of charts

      Environment variables:

Name                  Description
$HELM_CACHE_HOME      set an alternative location for storing cached files.
$HELM_CONFIG_HOME     set an alternative location for storing Helm configuration.
$HELM_DATA_HOME       set an alternative location for storing Helm data.

      ...


    Kustomize

    Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once.

    1. Download the binary version of kustomize v3.10.0 in the current folder.
    • For Linux amd64

      wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz
• For other operating systems, check the kustomize/v3.10.0 release page and download the binary that matches your OS.

2. Unzip to use kustomize, and change the file location.

      tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz
      sudo mv kustomize /usr/local/bin/kustomize
3. Check if it is installed correctly.

      kustomize help

      If you see the following message, it means that it has been installed normally.

      Manages declarative configuration of Kubernetes.
      See https://sigs.k8s.io/kustomize

      Usage:
      kustomize [command]

      Available Commands:
      build Print configuration per contents of kustomization.yaml
      cfg Commands for reading and writing configuration.
      completion Generate shell completion script
      create Create a new kustomization in the current directory
      edit Edits a kustomization file
      fn Commands for running functions against configuration.
      ...
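
Once kustomize is installed, a quick way to see what kustomize build does is to render a minimal kustomization. The following is a small sketch; the directory, file names, and resource contents are purely illustrative.

mkdir -p kustomize-demo && cd kustomize-demo

# A resource to manage.
cat <<EOF > configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: demo-config
data:
  greeting: hello
EOF

# The kustomization lists the resources and can transform them, e.g. add a common label.
cat <<EOF > kustomization.yaml
resources:
  - configmap.yaml
commonLabels:
  app: demo
EOF

# Render the final manifests to stdout; pipe to "kubectl apply -f -" to apply them.
kustomize build .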

    CSI Plugin : Local Path Provisioner

1. The CSI Plugin is a module responsible for storage within Kubernetes. Install Local Path Provisioner, a CSI Plugin that is easy to use in single-node clusters.

      kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml

      If you see the following messages, it means that the installation was successful:

      namespace/local-path-storage created
      serviceaccount/local-path-provisioner-service-account created
      clusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created
      clusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created
      deployment.apps/local-path-provisioner created
      storageclass.storage.k8s.io/local-path created
      configmap/local-path-config created
    2. Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:

      kubectl -n local-path-storage get pod

    If successful, it will display the following output:

NAME                                      READY     STATUS    RESTARTS   AGE
local-path-provisioner-d744ccf98-xfcbk    1/1       Running   0          7m
3. Execute the following command to change the default storage class:

      kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'

      If the command is successful, the following output will be displayed:

      storageclass.storage.k8s.io/local-path patched
4. Verify that the default storage class has been set:

      kubectl get sc

      Check if there is a storage class with the name local-path (default) in the NAME column:

NAME                   PROVISIONER             RECLAIMPOLICY   VOLUMEBINDINGMODE      ALLOWVOLUMEEXPANSION   AGE
local-path (default)   rancher.io/local-path   Delete          WaitForFirstConsumer   false                  2h
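
To confirm that dynamic provisioning works end to end, you can create a throwaway PVC together with a pod that mounts it; because the binding mode is WaitForFirstConsumer, the PVC only binds once the pod is scheduled. This is a minimal sketch with placeholder names and a small busybox image, not part of the installation itself.

kubectl apply -f - <<EOF
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: local-path-test-pvc
spec:
  accessModes: ["ReadWriteOnce"]
  resources:
    requests:
      storage: 1Gi
---
apiVersion: v1
kind: Pod
metadata:
  name: local-path-test-pod
spec:
  containers:
    - name: app
      image: busybox:1.35
      command: ["sh", "-c", "echo ok > /data/ok && sleep 3600"]
      volumeMounts:
        - name: data
          mountPath: /data
  volumes:
    - name: data
      persistentVolumeClaim:
        claimName: local-path-test-pvc
EOF

# The PVC should become Bound and the pod Running shortly after scheduling.
kubectl get pvc local-path-test-pvc
kubectl get pod local-path-test-pod

# Clean up the test resources afterwards.
kubectl delete pod local-path-test-pod
kubectl delete pvc local-path-test-pvc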

However, we need to use Docker as the backend in order to use the GPU, so we will install k3s with the --docker option.

    curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker

    After installing k3s, check the k3s config.

    sudo cat /etc/rancher/k3s/k3s.yaml

    If installed correctly, the following items will be output. (Security related keys are hidden with <...>.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://127.0.0.1:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>

    2. Setup Kubernetes Cluster

    Set up the Kubernetes cluster by copying the k3s config to be used as the cluster’s kubeconfig.

    mkdir .kube
    sudo cp /etc/rancher/k3s/k3s.yaml .kube/config

    Grant user access permission to the copied config file.

    sudo chown $USER:$USER .kube/config

    3. Setup Kubernetes Client

Now copy the kubeconfig configured on the cluster to the local client, and save it to the path ~/.kube/config on the client.

The copied config file has the server IP set to https://127.0.0.1:6443. Modify this value to match the IP of the cluster. (We modified it to https://192.168.0.19:6443 to match the IP of the cluster used on this page.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://192.168.0.19:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>
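
For convenience, the server address can also be rewritten in place instead of editing the file by hand; a minimal sketch, assuming the cluster IP used on this page (replace it with your own):

sed -i 's#https://127.0.0.1:6443#https://192.168.0.19:6443#' ~/.kube/config
# On macOS, use: sed -i '' 's#https://127.0.0.1:6443#https://192.168.0.19:6443#' ~/.kube/config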

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

    If you see the following message, it means that the installation was successful.

NAME     STATUS   ROLES                  AGE   VERSION        INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
ubuntu   Ready    control-plane,master   11m   v1.21.7+k3s1   192.168.0.19   <none>        Ubuntu 20.04.3 LTS   5.4.0-91-generic   docker://20.10.11

    6. References

    Version: 1.0

    4.3. Kubeadm

    1. Prerequisite

    Before building a Kubernetes cluster, install the necessary components to the cluster.

    Please refer to Install Prerequisite and install the necessary components to the cluster.

    Change the configuration of the network for Kubernetes.

    sudo modprobe br_netfilter

    cat <<EOF | sudo tee /etc/modules-load.d/k8s.conf
    br_netfilter
    EOF

    cat <<EOF | sudo tee /etc/sysctl.d/k8s.conf
    net.bridge.bridge-nf-call-ip6tables = 1
    net.bridge.bridge-nf-call-iptables = 1
    EOF
    sudo sysctl --system

    2. Setup Kubernetes Cluster

    • kubeadm : Automates the installation process by registering kubelet as a service and issuing certificates for communication between cluster components.
    • kubelet : Container handler responsible for starting and stopping container resources.
    • kubectl : CLI tool used to interact with and manage Kubernetes clusters from the terminal.

    Install kubeadm, kubelet, and kubectl using the following commands. It's important to prevent accidental changes to the versions of these components, as it can lead to unexpected issues.

    sudo apt-get update
    sudo apt-get install -y apt-transport-https ca-certificates curl &&
    sudo curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg &&
    echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list &&
    sudo apt-get update
    sudo apt-get install -y kubelet=1.21.7-00 kubeadm=1.21.7-00 kubectl=1.21.7-00 &&
    sudo apt-mark hold kubelet kubeadm kubectl

    Check if kubeadm, kubelet, and kubectl are installed correctly.

    mlops@ubuntu:~$ kubeadm version
    kubeadm version: &version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:40:08Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
    mlops@ubuntu:~$ kubelet --version
    Kubernetes v1.21.7
    mlops@ubuntu:~$ kubectl version --client
    Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}

    Now we will use kubeadm to install Kubernetes.

    kubeadm config images list
    kubeadm config images pull

    sudo kubeadm init --pod-network-cidr=10.244.0.0/16

Copy the admin kubeconfig to the path $HOME/.kube/config so that the Kubernetes cluster can be controlled through kubectl.

    mkdir -p $HOME/.kube
    sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
    sudo chown $(id -u):$(id -g) $HOME/.kube/config

Install a CNI. The CNI is responsible for setting up the network inside Kubernetes; there are various kinds, and MLOps for All uses flannel.

    kubectl apply -f https://raw.githubusercontent.com/flannel-io/flannel/v0.13.0/Documentation/kube-flannel.yml

There are two types of Kubernetes nodes: Master Node and Worker Node. For stability, it is generally recommended that only tasks that control the Kubernetes cluster run on the Master Node; however, this manual assumes a single-node cluster, so all types of workloads can run on the Master Node.

    kubectl taint nodes --all node-role.kubernetes.io/master-

    3. Setup Kubernetes Client

    Copy the kubeconfig file created in the cluster to the client to control the cluster through kubectl.

    mkdir -p $HOME/.kube
    scp -p {CLUSTER_USER_ID}@{CLUSTER_IP}:~/.kube/config ~/.kube/config

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions.

    kubectl get nodes

    When the node is in the "Ready" state, the output will be similar to the following:

NAME     STATUS   ROLES                  AGE     VERSION
ubuntu   Ready    control-plane,master   2m55s   v1.21.7

    6. References

users:
- name: minikube
  user:
    client-certificate-data: LS0tLS1CRUdJTi....
    client-key-data: LS0tLS1CRUdJTiBSU0....

    1. Create the .kube folder on the client node:

      # Client node
      mkdir -p /home/$USER/.kube
    2. Paste the information obtained from Step 2 into the file and save it:

      vi /home/$USER/.kube/config

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

    If this message appears, it means that the installation has completed normally.

NAME     STATUS   ROLES                  AGE     VERSION   INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
ubuntu   Ready    control-plane,master   2d23h   v1.21.7   192.168.0.75   <none>        Ubuntu 20.04.3 LTS   5.4.0-91-generic   docker://20.10.11
    Version: 1.0

    3. Install Prerequisite

    On this page, we describe the components that need to be installed or configured on the Cluster and Client prior to installing Kubernetes.

    Install apt packages

    In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. To enable Port-Forwarding, the following packages need to be installed on the Cluster.

    sudo apt-get update
    sudo apt-get install -y socat

    Install Docker

    1. Install apt packages for docker.

      sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release
2. Add Docker's official GPG key.

      curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
    3. When installing Docker using the apt package manager, configure it to retrieve from the stable repository:

      echo \
      "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
      $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
    4. Check the currently available Docker versions for installation:

      sudo apt-get update && apt-cache madison docker-ce

      Verify if the version 5:20.10.11~3-0~ubuntu-focal is listed among the output:

      apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal

      If the addition was successful, the following output will be displayed:

      docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages
    5. Install Docker version 5:20.10.11~3-0~ubuntu-focal:

      sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal

6. Check that Docker is installed.

      sudo docker run hello-world

    If added successfully, it will output as follows:

    mlops@ubuntu:~$ sudo docker run hello-world

    Hello from Docker!
    This message shows that your installation appears to be working correctly.

    To generate this message, Docker took the following steps:
    1. The Docker client contacted the Docker daemon.
    2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
    (amd64)
    3. The Docker daemon created a new container from that image which runs the
    executable that produces the output you are currently reading.
    4. The Docker daemon streamed that output to the Docker client, which sent it
    to your terminal.

    To try something more ambitious, you can run an Ubuntu container with:
    $ docker run -it ubuntu bash

    Share images, automate workflows, and more with a free Docker ID:
    https://hub.docker.com/

    For more examples and ideas, visit:
    https://docs.docker.com/get-started/
7. Add permissions to use Docker commands without the sudo keyword by executing the following commands:

      sudo groupadd docker
      sudo usermod -aG docker $USER
      newgrp docker
8. To verify that you can now use Docker commands without sudo, run the docker run command again:

      docker run hello-world

      If you see the following message after executing the command, it means that the permissions have been successfully added:

      mlops@ubuntu:~$ docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/

    Turn off Swap Memory

For kubelet to work properly, swap (virtual memory) must be turned off on the cluster nodes. The following commands turn swap off.
(When using the cluster and client on the same desktop, turning off swap memory may slow the machine down.)

    sudo sed -i '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab
    sudo swapoff -a

    Install Kubectl

    kubectl is a client tool used to make API requests to a Kubernetes cluster. It needs to be installed on the client node.

    1. Download kubectl version v1.21.7 to the current folder:

      curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl
    2. Change the file permissions and move it to the appropriate location to make kubectl executable:

      sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
    3. Verify that kubectl is installed correctly:

      kubectl version --client

      If you see the following message, it means that kubectl is installed successfully:

      Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
4. If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, you can refer to resources on kubeconfig management; a small example is sketched below.
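
For example, a kubeconfig from another cluster can be kept as a separate file and selected through the KUBECONFIG environment variable and kube-contexts; a minimal sketch with placeholder file and context names:

# Point kubectl at several kubeconfig files at once.
export KUBECONFIG=$HOME/.kube/config:$HOME/.kube/other-cluster-config

# List the available contexts and switch between them.
kubectl config get-contexts
kubectl config use-context my-other-context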

    References


Therefore, if you are not using a cloud environment, you can install UTM, a virtual machine application for Mac, to run virtual machines. (Purchasing and downloading it from the App Store is a form of donation-based payment; the free version is sufficient, as it only differs in automatic updates.) This virtual machine software supports the Ubuntu 20.04.3 LTS practice operating system, enabling you to perform the exercises on an M1 Mac.

    However, since it is not possible to use all the elements described in the Components of MLOps, MLOps for ALL will mainly focus on installing the representative open source software and connecting them to each other.

The open source software installed in MLOps for ALL is not meant to be a standard; we recommend choosing the tools that best fit your situation.

    Components

The components of the MLOps system that we will build in this article, and their versions, have been verified in the following environment.

To facilitate smooth testing, we will explain the setup of the Cluster and the Client as separate entities.

    The Cluster refers to a single desktop with Ubuntu installed.
The Client is recommended to be a different machine, such as a laptop or another desktop that can access the Cluster where Kubernetes is installed. However, if you only have one machine available, you can use the same desktop for both Cluster and Client purposes.

    Cluster

    1. Software

    Below is the list of software that needs to be installed on the Cluster:

Software          Version
Ubuntu            20.04.3 LTS
Docker (Server)   20.10.11
NVIDIA Driver     470.86
Kubernetes        v1.21.7
Kubeflow          v1.4.0
MLFlow            v1.21.0

    2. Helm Chart

    Below is the list of third-party software that needs to be installed using Helm:

Helm Chart Repo Name             Version
datawire/ambassador              6.9.3
seldonio/seldon-core-operator    1.11.2

    Client

    The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04.

Software     Version
kubectl      v1.21.7
helm         v3.7.1
kustomize    v3.10.0
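
Once the client tools are installed (their installation is covered on the corresponding pages), you can quickly confirm that the installed versions match the table above:

kubectl version --client
helm version --short
kustomize version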

    Minimum System Requirements

    It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:

    • CPU: 6 cores
    • RAM: 12GB
    • DISK: 50GB
    • GPU: NVIDIA GPU (optional)
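
A quick way to check whether your machine meets these specifications (a convenience sketch, not part of the original setup steps):

nproc                     # number of CPU cores
free -h | grep Mem        # total RAM
df -h /                   # free disk space on the root partition
lspci | grep -i nvidia    # lists NVIDIA GPUs, if any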
    + \ No newline at end of file diff --git a/en/docs/1.0/setup-kubernetes/kubernetes/index.html b/en/docs/1.0/setup-kubernetes/kubernetes/index.html index 7eec84d6..de60de31 100644 --- a/en/docs/1.0/setup-kubernetes/kubernetes/index.html +++ b/en/docs/1.0/setup-kubernetes/kubernetes/index.html @@ -7,15 +7,15 @@ - +
    Version: 1.0

    2. Setup Kubernetes

    Setup Kubernetes Cluster

    For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment.

    The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes.

    Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster. For detailed comparisons of each tool, please refer to the official Kubernetes documentation.

    MLOps for ALL recommends k3s as a tool that is easy to use when setting up a Kubernetes cluster.

    If you want to use all the features of Kubernetes and configure the nodes, we recommend kubeadm.
minikube has the advantage of letting you easily install additional Kubernetes components as add-ons, beyond the components we describe.

In MLOps for ALL, in order for the MLOps components built later to work smoothly, there are additional settings that must be configured when building the Kubernetes cluster with each of these tools.

    The scope of this Setup Kubernetes section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster.

Since each of the three tools has its own setup procedure, the detailed setup follows the flow below.

    3. Setup Prerequisite
    4. Setup Kubernetes
    4.1. with k3s
    4.2. with minikube
    4.3. with kubeadm
    5. Setup Kubernetes Modules

Now let's build a Kubernetes cluster with these tools. You don't have to use all of them; pick the one you are most comfortable with.

    + \ No newline at end of file diff --git a/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu/index.html b/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu/index.html index 225db53e..df0cf514 100644 --- a/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu/index.html +++ b/en/docs/1.0/setup-kubernetes/setup-nvidia-gpu/index.html @@ -7,15 +7,15 @@ - +
    Version: 1.0

    6. (Optional) Setup GPU

To use GPUs in Kubernetes and Kubeflow, the following tasks are required.

    1. Install NVIDIA Driver

If running nvidia-smi already produces output like the following, you can skip this step.

    mlops@ubuntu:~$ nvidia-smi 
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    | 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |
    | 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |
    | 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |
    +-----------------------------------------------------------------------------+

If the output of nvidia-smi does not look like the above, please install the NVIDIA driver that matches your GPU.

If you are not familiar with installing NVIDIA drivers, you can install them with the following commands.

    sudo add-apt-repository ppa:graphics-drivers/ppa
    sudo apt update && sudo apt install -y ubuntu-drivers-common
    sudo ubuntu-drivers autoinstall
    sudo reboot

    2. Install NVIDIA-Docker.

    Let's install NVIDIA-Docker.

    curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \
    sudo apt-key add -
    distribution=$(. /etc/os-release;echo $ID$VERSION_ID)
    curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
    sudo apt-get update
    sudo apt-get install -y nvidia-docker2 &&
    sudo systemctl restart docker

To check that it is installed correctly, run a Docker container that uses the GPU.

    sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi

    If the following message appears, it means that the installation was successful:

    mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 6W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    +-----------------------------------------------------------------------------+

    3. Setting NVIDIA-Docker as the Default Container Runtime

By default, Kubernetes uses Docker-CE as its container runtime. To use NVIDIA GPUs inside the containers that make up pods, you need to configure NVIDIA-Docker as the default container runtime.

    1. Open the /etc/docker/daemon.json file and make the following modifications:

      sudo vi /etc/docker/daemon.json

  {
      "default-runtime": "nvidia",
      "runtimes": {
          "nvidia": {
              "path": "nvidia-container-runtime",
              "runtimeArgs": []
          }
      }
  }
    2. After confirming the file changes, restart Docker.

      sudo systemctl daemon-reload
      sudo service docker restart
    3. Verify that the changes have been applied.

      sudo docker info | grep nvidia

      If you see the following message, it means that the installation was successful.

      mlops@ubuntu:~$ docker info | grep nvidia
      Runtimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc
      Default Runtime: nvidia
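
  Because nvidia is now the default runtime, GPU containers should also work without the --gpus flag (a quick sanity check; the nvidia/cuda base image sets NVIDIA_VISIBLE_DEVICES=all by default):

  sudo docker run --rm nvidia/cuda:11.0-base nvidia-smi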

    4. Nvidia-Device-Plugin

    1. Create the nvidia-device-plugin daemonset.

      kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml
    2. Verify that the nvidia-device-plugin pod is in the RUNNING state.

      kubectl get pod -n kube-system | grep nvidia

    You should see the following output:

    kube-system   nvidia-device-plugin-daemonset-nlqh2   1/1     Running   0    1h
3. Verify that the nodes have been configured to have GPUs available.

      kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\.com/gpu"

      If you see the following message, it means that the configuration was successful.
(In the MLOps for ALL tutorial cluster there are two GPUs, so the output is 2. If the output shows the correct number of GPUs for your cluster, it is fine.)

      NAME       GPU
      ubuntu 2

      If it is not configured, the GPU value will be displayed as <None>.
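
  As an additional check (a minimal sketch; the pod name is arbitrary and the image tag mirrors the one used above), you can schedule a pod that requests one GPU and runs nvidia-smi:

  cat <<EOF | kubectl apply -f -
  apiVersion: v1
  kind: Pod
  metadata:
    name: gpu-smoke-test
  spec:
    restartPolicy: Never
    containers:
    - name: cuda
      image: nvidia/cuda:11.0-base
      command: ["nvidia-smi"]
      resources:
        limits:
          nvidia.com/gpu: 1
  EOF
  # wait for the pod to complete, then check its logs and clean up
  kubectl logs gpu-smoke-test
  kubectl delete pod gpu-smoke-test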

    + \ No newline at end of file diff --git a/en/docs/api-deployment/seldon-children/index.html b/en/docs/api-deployment/seldon-children/index.html index 3b7e7bc1..4ebc48a2 100644 --- a/en/docs/api-deployment/seldon-children/index.html +++ b/en/docs/api-deployment/seldon-children/index.html @@ -7,14 +7,14 @@ - +
    Version: Next

    6. Multi Models

    Previously, the methods explained were all targeted at a single model. On this page, we will look at how to connect multiple models.

First, we will create a pipeline that produces two models: a StandardScaler placed in front of the SVC model we used before, with both models saved.

    from functools import partial

    import kfp
    from kfp.components import InputPath, OutputPath, create_component_from_func


    @partial(
    create_component_from_func,
    packages_to_install=["pandas", "scikit-learn"],
    )
    def load_iris_data(
    data_path: OutputPath("csv"),
    target_path: OutputPath("csv"),
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_scaler_from_csv(
    data_path: InputPath("csv"),
    scaled_data_path: OutputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    ):
    import dill
    import pandas as pd
    from sklearn.preprocessing import StandardScaler

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    data = pd.read_csv(data_path)

    scaler = StandardScaler()
    scaled_data = scaler.fit_transform(data)
    scaled_data = pd.DataFrame(scaled_data, columns=data.columns, index=data.index)

    scaled_data.to_csv(scaled_data_path, index=False)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(scaler, file_writer)

    input_example = data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(data, scaler.transform(data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_svc_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    input_example_path: OutputPath("dill"),
    signature_path: OutputPath("dill"),
    conda_env_path: OutputPath("dill"),
    kernel: str,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["scikit-learn"],
    install_mlflow=False
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)


    @partial(
    create_component_from_func,
    packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
    )
    def upload_sklearn_model_to_mlflow(
    model_name: str,
    model_path: InputPath("dill"),
    input_example_path: InputPath("dill"),
    signature_path: InputPath("dill"),
    conda_env_path: InputPath("dill"),
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)
    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)


    from kfp.dsl import pipeline


    @pipeline(name="multi_model_pipeline")
    def multi_model_pipeline(kernel: str = "rbf"):
    iris_data = load_iris_data()
    scaled_data = train_scaler_from_csv(data=iris_data.outputs["data"])
    _ = upload_sklearn_model_to_mlflow(
    model_name="scaler",
    model=scaled_data.outputs["model"],
    input_example=scaled_data.outputs["input_example"],
    signature=scaled_data.outputs["signature"],
    conda_env=scaled_data.outputs["conda_env"],
    )
    model = train_svc_from_csv(
    train_data=scaled_data.outputs["scaled_data"],
    train_target=iris_data.outputs["target"],
    kernel=kernel,
    )
    _ = upload_sklearn_model_to_mlflow(
    model_name="svc",
    model=model.outputs["model"],
    input_example=model.outputs["input_example"],
    signature=model.outputs["signature"],
    conda_env=model.outputs["conda_env"],
    )


    if __name__ == "__main__":
    kfp.compiler.Compiler().compile(multi_model_pipeline, "multi_model_pipeline.yaml")
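
Running this script (assuming it is saved as multi_model_pipeline.py) produces the multi_model_pipeline.yaml file, which you can then upload from the Kubeflow Pipelines UI:

python multi_model_pipeline.py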

If you upload the pipeline, it will look like this.

children-kubeflow.png

    When you check the MLflow dashboard, two models will be generated, as shown below.

    children-mlflow.png

    After checking the run_id of each one, define the SeldonDeployment spec as follows.

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: seldonio/mlflowserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"

Since two models have been created, an initContainer and a container must be defined for each model. These fields take arrays, and the order of the entries does not matter; the order in which the models are executed is defined in graph.

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"

The graph works as follows: the incoming request is processed by the first node using the predict_method defined in its parameters, and the result is then passed to the model defined under children. In this case, the data flows from scaler -> svc.

    Now let's create the above specifications in a yaml file.

    cat <<EOF > multi-model.yaml
    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: multi-model-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: scaler-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/7f445015a0e94519b003d316478766ef/artifacts/scaler"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret
    - name: svc-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/87eb168e76264b39a24b0e5ca0fe922b/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: scaler
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0
    - name: svc
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: scaler
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: predict_method
    type: STRING
    value: "transform"
    children:
    - name: svc
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    EOF

    Create an API through the following command.

    kubectl apply -f multi-model.yaml

If it runs correctly, you will see the following output.

    seldondeployment.machinelearning.seldon.io/multi-model-example created

Check whether the pod has been created correctly.

    kubectl get po -n kubeflow-user-example-com | grep multi-model-example

If everything worked, you will see a pod similar to the following.

    multi-model-example-model-0-scaler-svc-9955fb795-n9ffw   4/4     Running     0          2m30s
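
Once the pod is running, you can send a request to the combined graph in the same way as on the previous pages; the input passes through scaler (transform) and then svc (predict). This sketch assumes the same Ambassador NODE_IP/NODE_PORT variables used earlier; depending on your Kubeflow ingress and authentication setup, you may also need to supply an auth token or cookie.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/multi-model-example/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{"data": {"ndarray": [[5.1, 3.5, 1.4, 0.2]]}}'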
    + \ No newline at end of file diff --git a/en/docs/api-deployment/seldon-fields/index.html b/en/docs/api-deployment/seldon-fields/index.html index 6cc011a9..b724032c 100644 --- a/en/docs/api-deployment/seldon-fields/index.html +++ b/en/docs/api-deployment/seldon-fields/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    4. Seldon Fields

    Summary of how Seldon Core creates an API server:

    1. initContainer downloads the required model from the model repository.
    2. The downloaded model is passed to the container.
    3. The container runs an API server enclosing the model.
4. You can send requests to the generated API server's address to receive inference results from the model.

The most commonly used form of the yaml file defining the SeldonDeployment custom resource in Seldon Core is as follows:

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: seldon-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: model-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location

    containers:
    - name: model
    image: seldonio/sklearnserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: model
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    children: []

The name and predictors fields of a SeldonDeployment are required. name is mainly used to distinguish pods in Kubernetes and has no other major effect. predictors must be an array whose entries define name, componentSpecs, and graph; here too, name is mainly used to distinguish pods and has no other major effect.

    Now let's take a look at the fields that need to be defined in componentSpecs and graph.

    componentSpecs

componentSpecs must be an array whose entries consist of a spec key. Each spec must define the volumes, initContainers, and containers fields.

    volumes

    volumes:
    - name: model-provision-location
    emptyDir: {}

volumes defines the space used to store the models downloaded by the initContainer; it is an array of entries with name and emptyDir. These values are used only while downloading and moving the models, so they rarely need to be modified.

    - name: model-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location

The args field contains the arguments needed to download the model from the model repository and move it to the specified model path; it provides the parameters the initContainer needs to download and store the model.

The initContainer is responsible for downloading the model that the API will serve, so its fields describe the information needed to fetch the model from the model registry.

initContainers is an array with one entry per model, so each model must be specified separately.

    name

name is the name of the init container in Kubernetes; using {model_name}-initializer is recommended to make debugging easier.

    image

image is the name of the image used to download the model. Two images are recommended by Seldon Core:

    • gcr.io/kfserving/storage-initializer:v0.4.0
    • seldonio/rclone-storage-initializer:1.13.0-dev

    For more detailed information, please refer to the following resources:

    In MLOps for ALL, we use kfserving for downloading and storing models.

    args

    args:
    - "gs://seldon-models/v1.12.0-dev/sklearn/iris"
    - "/mnt/models"

When the gcr.io/kfserving/storage-initializer:v0.4.0 Docker image runs, it takes its arguments as an array. The first value is the address of the model to download, and the second is the path where the downloaded model will be stored (Seldon Core conventionally uses /mnt/models).

    volumeMounts

    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location

volumeMounts attaches the volume defined in volumes so that /mnt/models can be shared between containers. For more information, refer to the Kubernetes Volume documentation.

    container

    containers:
    - name: model
    image: seldonio/sklearnserver:1.8.0-dev
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

containers defines the configuration used when the model is run as an API.

    name

The name field is the name of the container in Kubernetes; it should be the name of the model being served, and it must match the name used in graph below.

    image

The image field is the image used to serve the model as an API. The image should have all the packages needed to load and serve the model already installed.

    Seldon Core provides official images for different types of models, including:

    • seldonio/sklearnserver
    • seldonio/mlflowserver
    • seldonio/xgboostserver
    • seldonio/tfserving

    You can choose the appropriate image based on the type of model you are using.

    volumeMounts

    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true

This field specifies the path where the data downloaded by the initContainer is located. readOnly: true is also set here to prevent the model from being modified.

    securityContext

    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

When installing the required packages, the pod may fail because it lacks the necessary permissions. To address this, root permissions are granted (note that this can be a security concern in a real production service).

    graph

    graph:
    name: model
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    children: []

This field defines the order in which the models run.

    name

    The name field refers to the name of the model graph. It should match the name defined in the container.

    type

    The type field can have four different values:

    1. TRANSFORMER
    2. MODEL
    3. OUTPUT_TRANSFORMER
    4. ROUTER

    For detailed explanations of each type, you can refer to the Seldon Core Complex Graphs Metadata Example.

    parameters

The parameters field contains values passed to the server class's __init__. For sklearnserver, you can find the accepted values in the following file.

class SKLearnServer(SeldonComponent):
    def __init__(self, model_uri: str = None, method: str = "predict_proba"):

As the code shows, you can set model_uri and method.
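
For example, a sketch of overriding method in the graph parameters so that sklearnserver returns class predictions instead of probabilities (this simply mirrors the parameters block shown above; whether the server honors it depends on the __init__ signature just quoted):

parameters:
- name: model_uri
  type: STRING
  value: "/mnt/models"
- name: method
  type: STRING
  value: "predict"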

    children

The children field is used to chain models together into a sequence. More details about this field are explained on the following page.

    + \ No newline at end of file diff --git a/en/docs/api-deployment/seldon-iris/index.html b/en/docs/api-deployment/seldon-iris/index.html index 27e5a28a..83d5c3eb 100644 --- a/en/docs/api-deployment/seldon-iris/index.html +++ b/en/docs/api-deployment/seldon-iris/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ Because this iris model is trained through the sklearn framework, we use SKLEARN_SERVER.

    cat <<EOF > iris-sdep.yaml
    apiVersion: machinelearning.seldon.io/v1alpha2
    kind: SeldonDeployment
    metadata:
    name: sklearn
    namespace: seldon-deploy
    spec:
    name: iris
    predictors:
    - graph:
    children: []
    implementation: SKLEARN_SERVER
    modelUri: gs://seldon-models/v1.12.0-dev/sklearn/iris
    name: classifier
    name: default
    replicas: 1
    EOF

Deploy the yaml file.

    kubectl apply -f iris-sdep.yaml

    Check if the deployment was successful through the following command.

    kubectl get pods --selector seldon-app=sklearn-default -n seldon-deploy

If everything is running, output similar to the following will be printed.

    NAME                                            READY   STATUS    RESTARTS   AGE
    sklearn-default-0-classifier-5fdfd7bb77-ls9tr 2/2 Running 0 5m

    Ingress URL

Now, send an inference request to the deployed model to get the inference result. The API created by the SeldonDeployment follows this URL pattern: http://{NODE_IP}:{NODE_PORT}/seldon/{namespace}/{seldon-deployment-name}/api/v1.0/{method-name}/

    NODE_IP / NODE_PORT

    Since Seldon Core was installed with Ambassador as the Ingress Controller, all APIs created by SeldonDeployment can be requested through the Ambassador Ingress gateway.

Therefore, first set the address of the Ambassador Ingress Gateway as environment variables.

    export NODE_IP=$(kubectl get nodes -o jsonpath='{ $.items[*].status.addresses[?(@.type=="InternalIP")].address }')
    export NODE_PORT=$(kubectl get service ambassador -n seldon-system -o jsonpath="{.spec.ports[0].nodePort}")

    Check the set url.

    echo "NODE_IP"=$NODE_IP
    echo "NODE_PORT"=$NODE_PORT

The output should look similar to the following; if you are running in a cloud environment, you will see that the node's internal IP address is used.

    NODE_IP=192.168.0.19
    NODE_PORT=30486

    namespace / seldon-deployment-name

These are the namespace the SeldonDeployment is deployed in and its name, taken from the metadata section of the spec you defined.

    metadata:
    name: sklearn
    namespace: seldon-deploy

    In the example above, namespace is seldon-deploy, seldon-deployment-name is sklearn.
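
Putting these together with the NODE_IP and NODE_PORT variables set above, the full prediction URL for this example (using the predictions method described below) can be printed as:

echo "http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions"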

    method-name

    In SeldonDeployment, the commonly used method-name has two options:

    1. doc
    2. predictions

    The detailed usage of each method is explained below.

    Using Swagger

    First, let's explore how to use the doc method, which allows access to the Swagger generated by Seldon.

    1. Accessing Swagger

    According to the provided ingress URL rules, you can access the Swagger documentation using the following URL: http://192.168.0.19:30486/seldon/seldon-deploy/sklearn/api/v1.0/doc/

    iris-swagger1.png

    2. Selecting Swagger Predictions

    In the Swagger UI, select the /seldon/seldon-deploy/sklearn/api/v1.0/predictions endpoint.

    iris-swagger2.png

    3. Choosing Try it out

    iris-swagger3.png

    4. Inputting data in the Request body

    iris-swagger4.png

    Enter the following data into the Request body.

    {
    "data": {
    "ndarray":[[1.0, 2.0, 5.0, 6.0]]
    }
    }

    5. Check the inference results

    You can click the Execute button to obtain the inference result.

    iris-swagger5.png

    If everything is executed successfully, you will obtain the following inference result.

    {
    "data": {
    "names": [
    "t:0",
    "t:1",
    "t:2"
    ],
    "ndarray": [
    [
    9.912315378486697e-7,
    0.0007015931307746079,
    0.9992974156376876
    ]
    ]
    },
    "meta": {
    "requestPath": {
    "classifier": "seldonio/sklearnserver:1.11.2"
    }
    }
    }

    Using CLI

You can also use HTTP client CLI tools such as curl to make API requests. For example, request /predictions as follows:

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'

You should see a response like the following.

    {"data":{"names":["t:0","t:1","t:2"],"ndarray":[[0.0006985194531162835,0.00366803903943666,0.995633441507447]]},"meta":{"requestPath":{"classifier":"seldonio/sklearnserver:1.11.2"}}}
    + \ No newline at end of file diff --git a/en/docs/api-deployment/seldon-mlflow/index.html b/en/docs/api-deployment/seldon-mlflow/index.html index 1cd64191..682baa01 100644 --- a/en/docs/api-deployment/seldon-mlflow/index.html +++ b/en/docs/api-deployment/seldon-mlflow/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    5. Model from MLflow

    Model from MLflow

    On this page, we will learn how to create an API using a model saved in the MLflow Component.

    Secret

    The initContainer needs credentials to access minio and download the model. The credentials for access to minio are as follows.

    apiVersion: v1
    type: Opaque
    kind: Secret
    metadata:
    name: seldon-init-container-secret
    namespace: kubeflow-user-example-com
    data:
    AWS_ACCESS_KEY_ID: bWluaW8K=
    AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
    AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLm1ha2luYXJvY2tzLmFp
    USE_SSL: ZmFsc2U=

The value for AWS_ACCESS_KEY_ID is minio. However, since the values in a Secret's data section must be base64-encoded, the value you actually enter is the result of encoding it as shown below.

The plain-text values that need to be entered in data are: AWS_ACCESS_KEY_ID: minio, AWS_SECRET_ACCESS_KEY: minio123, AWS_ENDPOINT_URL: http://minio-service.kubeflow.svc:9000, and USE_SSL: false.

    The encoding can be done using the following command.

    echo -n minio | base64

    Then the following values will be output.

    bWluaW8=

Encoding each of the values in the same way gives the data entries used in the yaml below.
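
As a convenience, a small bash sketch that encodes all four values at once (the values are this tutorial's MinIO defaults):

for value in "minio" "minio123" "http://minio-service.kubeflow.svc:9000" "false"; do
    echo -n "$value" | base64   # each line corresponds to one entry in the Secret's data section
done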

    You can generate a yaml file through the following command to create the secret.

    cat <<EOF > seldon-init-container-secret.yaml
    apiVersion: v1
    kind: Secret
    metadata:
    name: seldon-init-container-secret
    namespace: kubeflow-user-example-com
    type: Opaque
    data:
    AWS_ACCESS_KEY_ID: bWluaW8=
    AWS_SECRET_ACCESS_KEY: bWluaW8xMjM=
    AWS_ENDPOINT_URL: aHR0cDovL21pbmlvLXNlcnZpY2Uua3ViZWZsb3cuc3ZjOjkwMDA=
    USE_SSL: ZmFsc2U=
    EOF

    Create the secret through the following command.

    kubectl apply -f seldon-init-container-secret.yaml

If it runs correctly, you will see the following output.

    secret/seldon-init-container-secret created

    Seldon Core yaml

Now let's write the yaml file to create the SeldonDeployment.

    apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: seldon-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: model-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: model
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: model
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    children: []

    There are two major changes compared to the previously created Seldon Fields:

    1. The envFrom field is added to the initContainer.
    2. The address in the args has been changed to s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc.

    args

    Previously, we mentioned that the first element of the args array is the path to the model we want to download. So, how can we determine the path of the model stored in MLflow?

    To find the path, go back to MLflow and click on the run, then click on the model, as shown below:

    seldon-mlflow-0.png

    You can use the path obtained from there.

    envFrom

    This process involves providing the environment variables required to access MinIO and download the model. We will use the seldon-init-container-secret created earlier.

    API Creation

    First, let's generate the YAML file based on the specification defined above.

cat <<EOF > seldon-mlflow.yaml
apiVersion: machinelearning.seldon.io/v1
    kind: SeldonDeployment
    metadata:
    name: seldon-example
    namespace: kubeflow-user-example-com
    spec:
    name: model
    predictors:
    - name: model

    componentSpecs:
    - spec:
    volumes:
    - name: model-provision-location
    emptyDir: {}

    initContainers:
    - name: model-initializer
    image: gcr.io/kfserving/storage-initializer:v0.4.0
    args:
    - "s3://mlflow/mlflow/artifacts/0/74ba8e33994144f599e50b3be176cdb0/artifacts/svc"
    - "/mnt/models"
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    envFrom:
    - secretRef:
    name: seldon-init-container-secret

    containers:
    - name: model
    image: ghcr.io/mlops-for-all/mlflowserver
    volumeMounts:
    - mountPath: /mnt/models
    name: model-provision-location
    readOnly: true
    securityContext:
    privileged: true
    runAsUser: 0
    runAsGroup: 0

    graph:
    name: model
    type: MODEL
    parameters:
    - name: model_uri
    type: STRING
    value: "/mnt/models"
    - name: xtype
    type: STRING
    value: "dataframe"
    children: []
    EOF

Create the SeldonDeployment with the following command.

    kubectl apply -f seldon-mlflow.yaml

If it runs correctly, you will see the following output.

    seldondeployment.machinelearning.seldon.io/seldon-example created

    Now we wait until the pod is up and running properly.

    kubectl get po -n kubeflow-user-example-com | grep seldon

If the output looks similar to the following, the API has been created successfully.

    seldon-example-model-0-model-5c949bd894-c5f28      3/3     Running     0          69s

You can test the created API by sending the following request from the CLI.

curl -X POST http://$NODE_IP:$NODE_PORT/seldon/kubeflow-user-example-com/seldon-example/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{
    "data": {
    "ndarray": [
    [
    143.0,
    0.0,
    30.0,
    30.0
    ]
    ],
    "names": [
    "sepal length (cm)",
    "sepal width (cm)",
    "petal length (cm)",
    "petal width (cm)"
    ]
    }
    }'

If it executes successfully, you will get the following result.

    {"data":{"names":[],"ndarray":["Virginica"]},"meta":{"requestPath":{"model":"ghcr.io/mlops-for-all/mlflowserver:e141f57"}}}
    Version: Next

    3. Seldon Monitoring

    Grafana & Prometheus

    Now, let's perform repeated API requests with the SeldonDeployment we created on the previous page and check if the dashboard changes.

    Dashboard

Port-forward the Grafana dashboard service installed earlier.

    kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

    Request API

Send repeated requests to the previously created SeldonDeployment.

    curl -X POST http://$NODE_IP:$NODE_PORT/seldon/seldon-deploy/sklearn/api/v1.0/predictions \
    -H 'Content-Type: application/json' \
    -d '{ "data": { "ndarray": [[1,2,3,4]] } }'

Then, when you check the Grafana dashboard, you can observe the Global Request Rate momentarily rise above 0 ops.

    repeat-raise.png

    This confirms that Prometheus and Grafana have been successfully installed and configured.

    Version: Next

    1. What is API Deployment?

    What is API Deployment?

After training a machine learning model, how should it be used? During training, the goal is to obtain a model with the highest possible performance; at inference time, the goal is to get predictions from the trained model quickly and easily.

To check a model's inference results, you can load the trained model and run inference in a Jupyter notebook or a Python script. However, this approach becomes inefficient as the model grows, the model can only be used in the environment where it exists, and it cannot be shared by many people.

Therefore, when machine learning is used in actual services, the trained model is exposed through an API. The model is loaded only once in the environment where the API server runs, inference results can be obtained easily via DNS, and the API can also be integrated with other services.

However, a lot of ancillary work is needed to turn a model into an API. To make this easier, machine learning frameworks such as TensorFlow have developed inference engines.

    Using inference engines, we can create APIs (REST or gRPC) that can load and infer from machine learning models developed and trained in the corresponding frameworks. When we send a request with the data we want to infer to an API server built using these inference engines, the engine performs the inference and sends back the results in the response.
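To give a sense of the ancillary work involved, the following is a minimal sketch of a hand-written prediction endpoint using Flask and a scikit-learn model saved with joblib; the model path and request format are hypothetical and not part of this guide's setup.

import joblib
from flask import Flask, jsonify, request

app = Flask(__name__)

# Hypothetical path to a trained scikit-learn model saved with joblib.
model = joblib.load("model.joblib")


@app.route("/predict", methods=["POST"])
def predict():
    # Expects a request body of the form {"instances": [[...feature values...], ...]}.
    instances = request.get_json()["instances"]
    predictions = model.predict(instances).tolist()
    return jsonify({"predictions": predictions})


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8080)

Even this toy server leaves out input validation, batching, gRPC support, metrics, and model versioning, which is exactly the kind of work inference engines take care of.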

    Some well-known open-source inference engines include:

There are also inference engines developed for widely used frameworks such as scikit-learn and XGBoost, even though those frameworks do not officially provide one.

    Deploying and serving the model's inference results through an API is called API deployment.

    Serving Framework

We have seen that various inference engines have been developed. Now, if we want to deploy these inference engines in a Kubernetes environment for API deployment, what steps are involved? We need to deploy various Kubernetes resources such as Deployments for the inference engines, Services to create endpoints for sending inference requests, and Ingress to forward external inference requests to the inference engines. We may also need to handle requirements such as scaling out when there is a high volume of inference requests, monitoring the status of the inference engines, and rolling out a new version when an improved model is available. Operating an inference engine involves many such considerations, and it goes well beyond a few one-off tasks.

    To address these requirements, serving frameworks have been developed to further abstract the deployment of inference engines in a Kubernetes environment.

    Some popular serving frameworks include:

    In MLOps for ALL, we use Seldon Core to demonstrate the process of API deployment.

When using Layer 2 mode, it is not necessary to bind IP to the network interface of the worker node, because it operates in a way that it responds directly to the ARP request of the local network and provides the computer's MAC address to the client.

The following metallb_config.yaml file configures MetalLB to manage the IP range 192.168.35.100 ~ 192.168.35.110 and to operate in Layer 2 mode.

    In case the cluster node and the client node are separated, the range of 192.168.35.100 ~ 192.168.35.110 must be accessible by both the client node and the cluster node.

    metallb_config.yaml

apiVersion: v1
kind: ConfigMap
metadata:
  namespace: metallb-system
  name: config
data:
  config: |
    address-pools:
    - name: default
      protocol: layer2
      addresses:
      - 192.168.35.100-192.168.35.110 # IP range

    Apply the above settings.

    kubectl apply -f metallb_config.yaml 

If it is applied successfully, the following output appears.

    configmap/config created

    Using MetalLB

    Kubeflow Dashboard

First, check the current status of the istio-ingressgateway service in the istio-system namespace, which serves the Kubeflow Dashboard, before changing its type to LoadBalancer so that MetalLB can provide load balancing.

    kubectl get svc/istio-ingressgateway -n istio-system

    The type of this service is ClusterIP and you can see that the External-IP value is none.

    NAME                   TYPE        CLUSTER-IP    EXTERNAL-IP   PORT(S)                                        AGE
    istio-ingressgateway ClusterIP 10.103.72.5 <none> 15021/TCP,80/TCP,443/TCP,31400/TCP,15443/TCP 4h21m

Change the type to LoadBalancer, and add the loadBalancerIP field if you want to pin a specific IP address.
    If you do not add it, IP addresses will be assigned sequentially from the IP address pool set above.

    kubectl edit svc/istio-ingressgateway -n istio-system
spec:
  clusterIP: 10.103.72.5
  clusterIPs:
  - 10.103.72.5
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: status-port
    port: 15021
    protocol: TCP
    targetPort: 15021
  - name: http2
    port: 80
    protocol: TCP
    targetPort: 8080
  - name: https
    port: 443
    protocol: TCP
    targetPort: 8443
  - name: tcp
    port: 31400
    protocol: TCP
    targetPort: 31400
  - name: tls
    port: 15443
    protocol: TCP
    targetPort: 15443
  selector:
    app: istio-ingressgateway
    istio: ingressgateway
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.100 # Add IP
status:
  loadBalancer: {}
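If you prefer to script this change rather than editing the Service interactively, the same patch can be applied with the Kubernetes Python client. This is a minimal sketch, assuming the kubernetes package is installed and your kubeconfig points at the cluster.

from kubernetes import client, config

config.load_kube_config()  # uses the current kubeconfig context
v1 = client.CoreV1Api()

# Switch the service type to LoadBalancer and pin the desired external IP.
patch = {"spec": {"type": "LoadBalancer", "loadBalancerIP": "192.168.35.100"}}
v1.patch_namespaced_service(
    name="istio-ingressgateway",
    namespace="istio-system",
    body=patch,
)

The same pattern applies to the minio, mlflow, and Grafana services below; only the service name, namespace, and IP address change.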

    If you check again, you will see that the External-IP value is 192.168.35.100.

    kubectl get svc/istio-ingressgateway -n istio-system
    NAME                   TYPE           CLUSTER-IP    EXTERNAL-IP      PORT(S)                                                                      AGE
    istio-ingressgateway LoadBalancer 10.103.72.5 192.168.35.100 15021:31054/TCP,80:30853/TCP,443:30443/TCP,31400:30012/TCP,15443:31650/TCP 5h1m

Open a web browser and connect to http://192.168.35.100 to verify that the following screen appears.

    login-after-istio-ingressgateway-setting.png

    minio Dashboard

Next, check the current status of the minio-service in the kubeflow namespace, which serves the minio Dashboard, before changing its type to LoadBalancer so that MetalLB can provide load balancing.

    kubectl get svc/minio-service -n kubeflow

    The type of this service is ClusterIP and you can confirm that the External-IP value is none.

    NAME            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)    AGE
    minio-service ClusterIP 10.109.209.87 <none> 9000/TCP 5h14m

Change the type to LoadBalancer, and add the loadBalancerIP field if you want to pin a specific IP address. If you do not add it, an IP address will be assigned sequentially from the IP address pool set above.

    kubectl edit svc/minio-service -n kubeflow
apiVersion: v1
kind: Service
metadata:
  annotations:
    kubectl.kubernetes.io/last-applied-configuration: |
      {"apiVersion":"v1","kind":"Service","metadata":{"annotations":{},"labels":{"application-crd-id":"kubeflow-pipelines"},"name":"minio-ser>
  creationTimestamp: "2022-01-05T08:44:23Z"
  labels:
    application-crd-id: kubeflow-pipelines
  name: minio-service
  namespace: kubeflow
  resourceVersion: "21120"
  uid: 0053ee28-4f87-47bb-ad6b-7ad68aa29a48
spec:
  clusterIP: 10.109.209.87
  clusterIPs:
  - 10.109.209.87
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: http
    port: 9000
    protocol: TCP
    targetPort: 9000
  selector:
    app: minio
    application-crd-id: kubeflow-pipelines
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.101 # Add IP
status:
  loadBalancer: {}

    If we check again, we can see that the External-IP value is 192.168.35.101.

    kubectl get svc/minio-service -n kubeflow
    NAME            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)          AGE
    minio-service LoadBalancer 10.109.209.87 192.168.35.101 9000:31371/TCP 5h21m

Open a web browser and connect to http://192.168.35.101:9000 to confirm that the following screen appears.

    login-after-minio-setting.png
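Beyond the web console, the exposed endpoint can also be used programmatically. The following is a minimal sketch using the minio Python package, assuming the default minio / minio123 credentials used elsewhere in this guide and the external IP assigned above.

from minio import Minio

# External IP assigned by MetalLB above; credentials are assumed to be the
# defaults used elsewhere in this guide (minio / minio123).
client = Minio(
    "192.168.35.101:9000",
    access_key="minio",
    secret_key="minio123",
    secure=False,
)

for bucket in client.list_buckets():
    print(bucket.name)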

    mlflow Dashboard

Likewise, check the current status of the mlflow-server-service in the mlflow-system namespace, which serves the mlflow Dashboard, before changing its type to LoadBalancer so that MetalLB can provide load balancing.

    kubectl get svc/mlflow-server-service -n mlflow-system

    The type of this service is ClusterIP and you can confirm that the External-IP value is none.

    NAME                    TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)    AGE
    mlflow-server-service ClusterIP 10.111.173.209 <none> 5000/TCP 4m50s

    Change the type to LoadBalancer and if you want to input the desired IP address, add the loadBalancerIP item.
    If you do not add it, the IP address will be assigned sequentially from the IP address pool set above.

    kubectl edit svc/mlflow-server-service -n mlflow-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: mlflow-server
    meta.helm.sh/release-namespace: mlflow-system
  creationTimestamp: "2022-01-07T04:00:19Z"
  labels:
    app.kubernetes.io/managed-by: Helm
  name: mlflow-server-service
  namespace: mlflow-system
  resourceVersion: "276246"
  uid: e5d39fb7-ad98-47e7-b512-f9c673055356
spec:
  clusterIP: 10.111.173.209
  clusterIPs:
  - 10.111.173.209
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - port: 5000
    protocol: TCP
    targetPort: 5000
  selector:
    app.kubernetes.io/name: mlflow-server
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.102 # Add IP
status:
  loadBalancer: {}

    If we check again, we can see that the External-IP value is 192.168.35.102.

    kubectl get svc/mlflow-server-service -n mlflow-system
    NAME                    TYPE           CLUSTER-IP       EXTERNAL-IP      PORT(S)          AGE
    mlflow-server-service LoadBalancer 10.111.173.209 192.168.35.102 5000:32287/TCP 6m11s

    Open the web browser and connect to http://192.168.35.102:5000 to confirm the following screen is displayed.

    login-after-mlflow-setting.png
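The exposed address can also be used as the tracking URI from client code. The following is a minimal sketch, assuming the mlflow package is installed; the experiment name is arbitrary.

import mlflow

# Point the client at the mlflow server exposed through MetalLB.
mlflow.set_tracking_uri("http://192.168.35.102:5000")
mlflow.set_experiment("metallb-check")

# Log a dummy run so that it appears in the dashboard opened above.
with mlflow.start_run():
    mlflow.log_param("source", "external-ip-test")
    mlflow.log_metric("value", 1.0)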

    Grafana Dashboard

Finally, check the current status of the seldon-core-analytics-grafana service in the seldon-system namespace, which serves the Grafana Dashboard, before changing its type to LoadBalancer so that MetalLB can provide load balancing.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system

    The type of the corresponding service is ClusterIP, and you can see that the External-IP value is none.

    NAME                            TYPE        CLUSTER-IP      EXTERNAL-IP   PORT(S)   AGE
    seldon-core-analytics-grafana ClusterIP 10.109.20.161 <none> 80/TCP 94s

    Change the type to LoadBalancer and if you want to enter an IP address, add the loadBalancerIP item.
If not, an IP address will be assigned sequentially from the IP address pool set above.

    kubectl edit svc/seldon-core-analytics-grafana -n seldon-system
apiVersion: v1
kind: Service
metadata:
  annotations:
    meta.helm.sh/release-name: seldon-core-analytics
    meta.helm.sh/release-namespace: seldon-system
  creationTimestamp: "2022-01-07T04:16:47Z"
  labels:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/name: grafana
    app.kubernetes.io/version: 7.0.3
    helm.sh/chart: grafana-5.1.4
  name: seldon-core-analytics-grafana
  namespace: seldon-system
  resourceVersion: "280605"
  uid: 75073b78-92ec-472c-b0d5-240038ea8fa5
spec:
  clusterIP: 10.109.20.161
  clusterIPs:
  - 10.109.20.161
  ipFamilies:
  - IPv4
  ipFamilyPolicy: SingleStack
  ports:
  - name: service
    port: 80
    protocol: TCP
    targetPort: 3000
  selector:
    app.kubernetes.io/instance: seldon-core-analytics
    app.kubernetes.io/name: grafana
  sessionAffinity: None
  type: LoadBalancer # Change ClusterIP to LoadBalancer
  loadBalancerIP: 192.168.35.103 # Add IP
status:
  loadBalancer: {}

    If you check again, you can see that the External-IP value is 192.168.35.103.

    kubectl get svc/seldon-core-analytics-grafana -n seldon-system
    NAME                            TYPE           CLUSTER-IP      EXTERNAL-IP      PORT(S)        AGE
    seldon-core-analytics-grafana LoadBalancer 10.109.20.161 192.168.35.103 80:31191/TCP 5m14s

    Open the Web Browser and connect to http://192.168.35.103:80 to confirm that the following screen is displayed.

    login-after-grafana-setting.png

    Version: Next

    1. Install Python virtual environment

    Python virtual environment

    When working with Python, there may be cases where you want to use multiple versions of Python environments or manage package versions separately for different projects.

    To easily manage Python environments or Python package environments in a virtualized manner, there are tools available such as pyenv, conda, virtualenv, and venv.

    Among these, MLOps for ALL covers the installation of pyenv and pyenv-virtualenv.
    pyenv helps manage Python versions, while pyenv-virtualenv is a plugin for pyenv that helps manage Python package environments.

    Installing pyenv

    Prerequisites

    Prerequisites vary depending on the operating system. Please refer to the following page and install the required packages accordingly.

    Installation - macOS

    1. Install pyenv, pyenv-virtualenv
    brew update
    brew install pyenv
    brew install pyenv-virtualenv
2. Set pyenv

On macOS, we assume the use of zsh, since it has been the default shell since Catalina; configure pyenv as follows.

    echo 'eval "$(pyenv init -)"' >> ~/.zshrc
    echo 'eval "$(pyenv virtualenv-init -)"' >> ~/.zshrc
    source ~/.zshrc

    Check if the pyenv command is executed properly.

    pyenv --help
    $ pyenv --help
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    Installation - Ubuntu

    1. Install pyenv and pyenv-virtualenv
    curl https://pyenv.run | bash

    If the following content is output, it means that the installation is successful.

      % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
    Dload Upload Total Spent Left Speed
    0 0 0 0 0 0 0 0 --:--:-- --:--:-- 0 0 0 0 0 0 0 0 --:--:-- --:--:-- 100 270 100 270 0 0 239 0 0:00:01 0:00:01 --:--:-- 239
    Cloning into '/home/mlops/.pyenv'...
    ...
    Skip...
    ...
    remote: Enumerating objects: 10, done.
    remote: Counting objects: 100% (10/10), done.
    remote: Compressing objects: 100% (6/6), done.
    remote: Total 10 (delta 1), reused 6 (delta 0), pack-reused 0
    Unpacking objects: 100% (10/10), 2.92 KiB | 2.92 MiB/s, done.

    WARNING: seems you still have not added 'pyenv' to the load path.


    # See the README for instructions on how to set up
    # your shell environment for Pyenv.

    # Load pyenv-virtualenv automatically by adding
    # the following to ~/.bashrc:

    eval "$(pyenv virtualenv-init -)"

2. Set pyenv

    Assuming the use of bash shell as the default shell, configure pyenv and pyenv-virtualenv to be used in bash.

    sudo vi ~/.bashrc

    Enter the following string and save it.

    export PATH="$HOME/.pyenv/bin:$PATH"
    eval "$(pyenv init -)"
    eval "$(pyenv virtualenv-init -)"

    Restart the shell.

    exec $SHELL

    Check if the pyenv command is executed properly.

    pyenv --help

    If the following message is displayed, it means that the settings have been configured correctly.

    $ pyenv
    pyenv 2.2.2
    Usage: pyenv <command> [<args>]

    Some useful pyenv commands are:
    --version Display the version of pyenv
    activate Activate virtual environment
    commands List all available pyenv commands
    deactivate Deactivate virtual environment
    doctor Verify pyenv installation and development tools to build pythons.
    exec Run an executable with the selected Python version
    global Set or show the global Python version(s)
    help Display help for a command
    hooks List hook scripts for a given pyenv command
    init Configure the shell environment for pyenv
    install Install a Python version using python-build
    local Set or show the local application-specific Python version(s)
    prefix Display prefix for a Python version
    rehash Rehash pyenv shims (run this after installing executables)
    root Display the root directory where versions and shims are kept
    shell Set or show the shell-specific Python version
    shims List existing pyenv shims
    uninstall Uninstall a specific Python version
    version Show the current Python version(s) and its origin
    version-file Detect the file that sets the current pyenv version
    version-name Show the current Python version
    version-origin Explain how the current Python version is set
    versions List all Python versions available to pyenv
    virtualenv Create a Python virtualenv using the pyenv-virtualenv plugin
    virtualenv-delete Uninstall a specific Python virtualenv
    virtualenv-init Configure the shell environment for pyenv-virtualenv
    virtualenv-prefix Display real_prefix for a Python virtualenv version
    virtualenvs List all Python virtualenvs found in `$PYENV_ROOT/versions/*'.
    whence List all Python versions that contain the given executable
    which Display the full path to an executable

    See `pyenv help <command>' for information on a specific command.
    For full documentation, see: https://github.com/pyenv/pyenv#readme

    Using pyenv

    Install python version

    Using the pyenv install <Python-Version> command, you can install the desired Python version.
In this page, we will install Python 3.7.12, the version used by Kubeflow by default, as an example.

    pyenv install 3.7.12

    If installed normally, the following message will be printed.

    $ pyenv install 3.7.12
    Downloading Python-3.7.12.tar.xz...
    -> https://www.python.org/ftp/python/3.7.12/Python-3.7.12.tar.xz
    Installing Python-3.7.12...
    patching file Doc/library/ctypes.rst
    patching file Lib/test/test_unicode.py
    patching file Modules/_ctypes/_ctypes.c
    patching file Modules/_ctypes/callproc.c
    patching file Modules/_ctypes/ctypes.h
    patching file setup.py
    patching file 'Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst'
    patching file Modules/_decimal/libmpdec/mpdecimal.h
    Installed Python-3.7.12 to /home/mlops/.pyenv/versions/3.7.12

    Create python virtual environment

Use the pyenv virtualenv <Installed-Python-Version> <Virtual-Environment-Name> command to create a Python virtual environment with the desired Python version.

    For example, let's create a Python virtual environment called demo with Python 3.7.12 version.

    pyenv virtualenv 3.7.12 demo
    $ pyenv virtualenv 3.7.12 demo
    Looking in links: /tmp/tmpffqys0gv
    Requirement already satisfied: setuptools in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (47.1.0)
    Requirement already satisfied: pip in /home/mlops/.pyenv/versions/3.7.12/envs/demo/lib/python3.7/site-packages (20.1.1)

    Activating python virtual environment

    Use the pyenv activate <environment name> command to use the virtual environment created in this way.

    For example, we will use a Python virtual environment called demo.

    pyenv activate demo

    You can see that the information of the current virtual environment is printed at the front of the shell.

    Before

    mlops@ubuntu:~$ pyenv activate demo

    After

    pyenv-virtualenv: prompt changing will be removed from future release. configure `export PYENV_VIRTUALENV_DISABLE_PROMPT=1' to simulate the behavior.
    (demo) mlops@ubuntu:~$
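To double-check that the demo environment's interpreter is the one being used, you can run a quick check from Python; this is a minimal sketch, and the exact path will differ per machine.

import platform
import sys

# Inside the demo virtualenv this should point somewhere under ~/.pyenv/versions/demo/.
print(sys.executable)
print(platform.python_version())  # expected: 3.7.12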

    Deactivating python virtual environment

    You can deactivate the currently active virtualenv by using the command source deactivate.

    source deactivate

    Before

    (demo) mlops@ubuntu:~$ source deactivate

    After

    mlops@ubuntu:~$ 

| | Scheduling | Kubernetes |
| Security & Compliance | Authentication & Authorization | Ldap |
| | Data Encryption & Tokenization | Vault |
| | Governance & Auditing | Open Policy Agent |

    As you can see, there are still many MLOps components that we have not covered yet. We could not cover them all this time due to time constraints, but if you need it, it might be a good idea to refer to the following open source projects first.

    open-stacks-2.png

    For details:

| Mgmt. | Component | Open Source |
| --- | --- | --- |
| Data Mgmt. | Collection | Kafka |
| | Validation | Beam |
| | Feature Store | Flink |
| ML Model Dev. & Experiment | Modeling | Jupyter |
| | Analysis & Experiment Mgmt. | MLflow |
| | HPO Tuning & AutoML | Katib |
| Deploy Mgmt. | Serving Framework | Seldon Core |
| | A/B Test | Iter8 |
| | Monitoring | Grafana, Prometheus |
| Process Mgmt. | Pipeline | Kubeflow |
| | CI/CD | Github Action |
| | Continuous Training | Argo Events |
| Platform Mgmt. | Configuration Mgmt. | Consul |
| | Code Version Mgmt. | Github, Minio |
| | Logging | (EFK) Elastic Search, Fluentd, Kibana |
| | Resource Mgmt. | Kubernetes |
    Version: Next

    3. Components of MLOps

    Practitioners guide to MLOps

    Google's white paper [Practitioners guide to MLOps: A framework for continuous delivery and automation of machine learning] published in May 2021 mentions the following core functionalities of MLOps:

    mlops-component

    Let's look at what each feature does.

    1. Experimentation

    Experimentation provides machine learning engineers with the following capabilities for data analysis, prototyping model development, and implementing training functionality:

    • Integration with version control tools like Git and a notebook (Jupyter Notebook) environment
    • Experiment tracking capabilities including data used, hyperparameters, and evaluation metrics
    • Data and model analysis and visualization capabilities

    2. Data Processing

    Data Processing enables working with large volumes of data during the stages of model development, continuous training, and API deployment by providing the following functionalities:

    • Data connectors compatible with various data sources and services
    • Data encoders and decoders compatible with different data formats
    • Data transformation and feature engineering capabilities for different data types
    • Scalable batch and streaming data processing capabilities for training and serving

    3. Model Training

    Model Training offers functionalities to efficiently execute algorithms for model training:

    • Environment provisioning for ML framework execution
    • Distributed training environment for multiple GPUs and distributed training
    • Hyperparameter tuning and optimization capabilities

    4. Model Evaluation

    Model evaluation provides the following capabilities to observe the performance of models in both experimental and production environments:

    • Model performance evaluation on evaluation datasets
    • Tracking prediction performance across different continuous training runs
    • Comparison and visualization of performance between different models
    • Model output interpretation using interpretable AI techniques

    5. Model Serving

    Model serving offers functionalities to deploy and serve models in production environments:

    • Low-latency and high-availability inference capabilities
    • Support for various ML model serving frameworks (TensorFlow Serving, TorchServe, NVIDIA Triton, Scikit-learn, XGBoost, etc.)
    • Advanced inference routines, such as preprocessing or postprocessing, and multi-model ensembling for final results
    • Autoscaling capabilities to handle spiking inference requests
    • Logging of inference requests and results

    6. Online Experimentation

    Online experimentation provides capabilities to validate the performance of newly generated models when deployed. This functionality should be integrated with a Model Registry to coordinate the deployment of new models.

    • Canary and shadow deployment features
    • A/B testing capabilities
    • Multi-armed bandit testing functionality

    7. Model Monitoring

    Model monitoring enables the monitoring of deployed models in production environments to ensure proper functioning and provides information on model performance degradation and the need for updates.

    8. ML Pipeline

    ML Pipeline offers the following functionalities to configure, control, and automate complex ML training and inference workflows in production environments:

    • Pipeline execution through various event sources
    • ML metadata tracking and integration for pipeline parameter and artifact management
    • Support for built-in components for common ML tasks and user-defined components
    • Provisioning of different execution environments

    9. Model Registry

    The Model Registry provides the capability to manage the lifecycle of machine learning models in a centralized repository.

    • Registration, tracking, and versioning of trained and deployed models
    • Storage of information about the required data and runtime packages for deployment

    10. Dataset and Feature Repository

    • Sharing, search, reuse, and versioning capabilities for datasets
    • Real-time processing and low-latency serving capabilities for event streaming and online inference tasks
    • Support for various types of data, such as images, text, and tabular data

    11. ML Metadata and Artifact Tracking

    In each stage of MLOps, various artifacts are generated. ML metadata refers to the information about these artifacts. ML metadata and artifact management provide the following functionalities to manage the location, type, attributes, and associations with experiments:

    • History management for ML artifacts
    • Tracking and sharing of experiments and pipeline parameter configurations
    • Storage, access, visualization, and download capabilities for ML artifacts
    • Integration with other MLOps functionalities
    Version: Next

    1. What is MLOps?

    Machine Learning Project

Since AlexNet in 2012, machine learning and deep learning have been adopted in virtually every domain where data exists, such as computer vision and natural language processing. Deep learning and machine learning came to be referred to collectively as AI, and many media outlets proclaimed the need for AI. Countless companies then ran numerous projects using machine learning and deep learning. But what was the result? Byungchan Eum, Head of North East Asia at Element AI, said, "If 10 companies start an AI project, 9 of them will only get as far as a proof of concept (POC)".

In many projects, machine learning and deep learning merely showed the possibility of solving the problem and then disappeared. Around this time, forecasts that another AI winter was coming also began to emerge.

Why did most projects end at the proof-of-concept (POC) stage? Because an actual service cannot be operated with machine learning and deep learning code alone.

    At the actual service stage, the portion taken up by machine learning and deep learning code is not as large as one would think, so one must consider many other aspects besides simply the performance of the model. Google has pointed out this problem in their 2015 paper Hidden Technical Debt in Machine Learning Systems. However, at the time this paper was released, many ML engineers were busy proving the potential of deep learning and machine learning, so the points made in the paper were not given much attention.

A few years later, machine learning and deep learning had proven their potential, and people now looked to apply them to actual services. However, many soon realized that building an actual service was not as easy as they had thought.

    Devops

    MLOps is not a new concept, but rather a term derived from the development methodology called DevOps. Therefore, understanding DevOps can help in understanding MLOps.

    DevOps

    DevOps is a portmanteau of "Development" and "Operations," referring to a development and operations methodology that emphasizes communication, collaboration, and integration between software developers and IT professionals. It encompasses both the development and operation phases of software, aiming to achieve a symbiotic relationship between the two. The primary goal of DevOps is to enable organizations to develop and deploy software products and services rapidly by fostering close collaboration and interdependence between development and operations teams.

    Silo Effect

    Let's explore why DevOps is necessary through a simple scenario.

    In the early stages of a service, there are fewer supported features, and the team or company is relatively small. At this point, there may not be a clear distinction between development and operations, or the teams may be small. The key point here is the small scale. In such cases, there are many points of contact for effective communication, and with a limited number of services to focus on, it is possible to rapidly improve the service.

    However, as the service scales up, the development and operations teams tend to separate, and the physical limitations of communication channels become apparent. For example, in meetings involving multiple teams, only team leaders or a small number of seniors may attend, rather than the entire team. These limitations in communication channels inevitably lead to a lack of communication. Consequently, the development team continues to develop new features, while the operations team faces issues during deployment caused by the features developed by the development team.

    When such situations are repeated, it can lead to organizational silos, a phenomenon known as silo mentality.

    silo

Indeed, the term "silo" originally refers to a tall, cylindrical structure used for storing grain or livestock feed. Silos are designed to keep the stored materials separate and prevent them from mixing. In the context of organizations, the "silo effect" or "organizational silos effect" refers to a phenomenon where departments or teams within an organization operate independently and prioritize their own interests without effective collaboration. It reflects a mentality where individual departments focus on building their own "silos" and solely pursue their own interests.

    The silo effect can lead to a decline in service quality and hinder organizational performance. To address this issue, DevOps emerged as a solution. DevOps emphasizes collaboration, communication, and integration between development and operations teams, breaking down the barriers and fostering a culture of shared responsibility and collaboration. By promoting cross-functional teamwork and streamlining processes, DevOps aims to overcome silos and improve the efficiency and effectiveness of software development and operations.

    CI/CD

    Continuous Integration (CI) and Continuous Delivery (CD) are concrete methods to break down the barriers between development teams and operations teams.

    cicd

    Through this method, the development team can understand the operational environment and check whether the features being developed can be seamlessly deployed. The operations team can deploy validated features or improved products more often to increase customer product experience. In summary, DevOps is a methodology to solve the problem between development teams and operations teams.

    MLOps

    1) ML + Ops

    DevOps is a methodology that addresses the challenges between development and operations teams, promoting collaboration and effective communication. By applying DevOps principles, development teams gain a better understanding of the operational environment, and the developed features can be seamlessly integrated and deployed. On the other hand, operations teams can deploy validated features or improved products more frequently, enhancing the overall customer experience.

    MLOps, which stands for Machine Learning Operations, extends the DevOps principles and practices specifically to the field of machine learning. In MLOps, the "Dev" in DevOps is replaced with "ML" to emphasize the unique challenges and considerations related to machine learning.

    MLOps aims to address the issues that arise between machine learning teams and operations teams. To understand these issues, let's consider an example using a recommendation system.

    Rule-Based Approach

    In the initial stages of building a recommendation system, a simple rule-based approach may be used. For example, items could be recommended based on the highest sales volume in the past week. With this approach, there is no need for model updates unless there are specific reasons for modification.

    Machine Learning Approach

    As the scale of the service grows and more log data accumulates, machine learning models can be developed based on item-based or user-based recommendations. In this case, the models are periodically retrained and redeployed.

    Deep Learning Approach

    When there is a greater demand for personalized recommendations and a need for models that deliver higher performance, deep learning models are developed. Similar to machine learning, these models are periodically retrained and redeployed.

    By considering these examples, it becomes evident that challenges can arise between the machine learning team and the operations team. MLOps aims to address these challenges and provide a methodology and set of practices to facilitate the development, deployment, and operation of machine learning models in a collaborative and efficient manner.

    graph

If we represent the concepts explained earlier on a graph, with model complexity on the x-axis and model performance on the y-axis, we can observe an upward trend: model performance improves as complexity increases. As organizations move from traditional machine learning to deep learning, this often leads to the emergence of separate machine learning teams that specialize in modeling.

    If there are only a few models to manage, collaboration between teams can be sufficient to address the challenges. However, as the number of models to develop increases, silos similar to those observed in DevOps can emerge.

    Considering the goals of DevOps, we can understand the goals of MLOps as ensuring that the developed models can be deployed successfully. While DevOps focuses on verifying that the features developed by the development team can be deployed correctly, MLOps focuses on verifying that the models developed by the machine learning team can be deployed effectively.

    2) ML -> Ops

    However, recent MLOps-related products and explanations indicate that the goals are not limited to what was previously described. In some cases, the goal is to enable the machine learning team to directly operate and manage the models they develop. This need arises from the process of ongoing machine learning projects.

    In the case of recommendation systems, it was possible to start with simple models in operations. However, in domains such as natural language processing and image analysis, it is common to perform verification (POC) to determine if deep learning models can solve the given tasks. Once the verification is complete, the focus shifts to developing the operational environment for serving the models. However, it may not be easy for the machine learning team to handle this challenge with their internal capabilities alone. This is where MLOps becomes necessary.

    3) Conclusion

    In summary, MLOps has two main goals. The earlier explanation of MLOps focused on ML+Ops, aiming to enhance productivity and collaboration between the two teams. On the other hand, the latter explanation focused on ML -> Ops, aiming to enable the machine learning team to directly operate and manage their models.

    + \ No newline at end of file diff --git a/en/docs/introduction/levels/index.html b/en/docs/introduction/levels/index.html index 43582d3f..65912791 100644 --- a/en/docs/introduction/levels/index.html +++ b/en/docs/introduction/levels/index.html @@ -7,14 +7,14 @@ - +
    Version: Next

    2. Levels of MLOps

    This page will look at the steps of MLOps outlined by Google and explore what the core features of MLOps are.

    Hidden Technical Debt in ML System

    Google has been talking about the need for MLOps since as far back as 2015. The paper Hidden Technical Debt in Machine Learning Systems encapsulates this idea from Google.

    paper

    The key takeaway from this paper is that the machine learning code is only a small part of the entire system when it comes to building products with machine learning.

    Google developed MLOps by evolving this paper and expanding the term. More details can be found on the Google Cloud homepage. In this post, we will try to explain what Google means by MLOps.

Google divided the evolution of MLOps into three stages (Levels 0-2). Before explaining each stage, let's review some of the concepts described in the previous post.

    In order to operate a machine learning model, there is a machine learning team responsible for developing the model and an operations team responsible for deployment and operations. MLOps is needed for the successful collaboration of these two teams. We have previously said that it can be done simply through Continuous Integration (CI) / Continuous Deployment (CD), so let us see how to do CI / CD.

    Level 0: Manual Process

    level-0

At Level 0, the two teams communicate through the "model". The machine learning team trains the model with accumulated data and delivers the trained model to the operations team. The operations team then deploys the model delivered in this way.

    toon

Initial machine learning models are deployed through this "model"-centered communication. However, there are several problems with this deployment method. For example, if one environment uses Python 3.7 and the other uses Python 3.8, we often see the following situation.

    The reason for this situation lies in the characteristics of the machine learning model. Three things are needed for the trained machine learning model to work:

    1. Python code
    2. Trained weights
    3. Environment (Packages, versions)

    If any of these three aspects is communicated incorrectly, the model may fail to function or make unexpected predictions. However, in many cases, models fail to work due to environmental mismatches. Machine learning relies on various open-source libraries, and due to the nature of open-source, even the same function can produce different results depending on the version used.
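To make this concrete, here is a minimal sketch (not from the original text; the output directory, the joblib dependency, and the package list are illustrative assumptions) of handing over the three pieces together: the trained weights are saved, and the Python and package versions are recorded next to them.

import json
import os
import sys
from importlib import metadata

import joblib  # assumed available; used only to serialize the trained weights


def package_model(model, out_dir="model_artifact"):
    """Save (2) the trained weights and (3) an environment record next to (1) the code."""
    os.makedirs(out_dir, exist_ok=True)
    joblib.dump(model, os.path.join(out_dir, "model.joblib"))
    env = {"python": sys.version.split()[0], "packages": {}}
    for name in ("scikit-learn", "pandas", "numpy"):  # illustrative package list
        try:
            env["packages"][name] = metadata.version(name)
        except metadata.PackageNotFoundError:
            pass  # package not installed in this environment
    with open(os.path.join(out_dir, "environment.json"), "w") as f:
        json.dump(env, f, indent=2)

The operations team can then compare environment.json against the serving environment before deployment, which catches most version mismatches early.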

In the early stages of a service, when there are not many models to manage, these issues can be resolved quickly. However, as the number of managed models and features increases and communication becomes more challenging, it becomes difficult to quickly deploy models with better performance.

    Level 1: Automated ML Pipeline

    Pipeline

    level-1-pipeline

    So, in MLOps, "pipeline" is used to prevent such problems. The MLOps pipeline ensures that the model operates in the same environment as the one used by the machine learning engineer during model development, using containers like Docker. This helps prevent situations where the model doesn't work due to differences in the environment.

    However, the term "pipeline" is used in a broader context and in various tasks. What is the role of the pipeline that machine learning engineers create? The pipeline created by machine learning engineers produces trained models. Therefore, it would be more accurate to refer to it as a training pipeline rather than just a pipeline.
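As a rough illustration (an assumption for this text, not Google's reference implementation), a training pipeline is simply a sequence of steps whose final artifact is a trained model:

import joblib
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC


def training_pipeline(model_path="model.joblib"):
    # 1. load data
    data = load_iris()
    x_train, x_test, y_train, y_test = train_test_split(
        data.data, data.target, random_state=0
    )
    # 2. train
    clf = SVC(kernel="rbf")
    clf.fit(x_train, y_train)
    # 3. evaluate and persist the trained model -- the pipeline's product
    print("test accuracy:", clf.score(x_test, y_test))
    joblib.dump(clf, model_path)
    return model_path

Packaging this sequence in a container ensures that every run produces the model in the same environment the machine learning engineer used during development.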

    Continuous Training

    level-1-ct.png

    And the concept of Continuous Training (CT) is added. So why is CT necessary?

    Auto Retrain

    In the real world, data exhibits a characteristic called "Data Shift," where the data distribution keeps changing over time. As a result, models trained in the past may experience performance degradation over time. The simplest and most effective solution to this problem is to retrain the model using recent data. By retraining the model according to the changed data distribution, it can regain its performance.

    Auto Deploy

    However, in industries such as manufacturing, where multiple recipes are processed in a single factory, it may not always be desirable to retrain the model unconditionally. One common example is the blind spot.

For example, suppose a prediction model was built for car model A on an automotive production line and used in production. When an entirely different car model B is introduced, its data patterns are unseen, so a new prediction model is trained for car model B.

Now the system makes predictions for car model B. But what should be done if the line switches back to car model A? If there are only retraining rules, a new model for car model A will be trained again from scratch. However, machine learning models require a sufficient amount of data to reach satisfactory performance. The term "blind spot" refers to the period in which no model works while enough data is being gathered.

There is a simple solution to this blind spot: check whether a previous model for car model A exists and, if so, use it for prediction instead of immediately training a new one. Using metadata associated with the model to switch models automatically in this way is known as Auto Deploy.

    To summarize, for Continuous Training (CT), both Auto Retrain and Auto Deploy are necessary. They complement each other's weaknesses and enable the model's performance to be maintained continuously.
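A minimal sketch of this decision logic, under the assumption that models are tracked per product model in a simple registry (the registry, the enough_data flag, and train_fn are all illustrative, not part of the original text):

from typing import Any, Callable, Dict, Optional

registry: Dict[str, Any] = {}  # product model id -> most recently trained ML model


def continuous_training(
    product_id: str,
    recent_data: Any,
    train_fn: Callable[[Any], Any],
    enough_data: bool,
) -> Optional[Any]:
    """Return a model for `product_id`, retraining only when it makes sense."""
    if enough_data:
        # Auto Retrain: refresh the model on recent data to follow data shift.
        registry[product_id] = train_fn(recent_data)
    elif product_id in registry:
        # Auto Deploy: fall back to the previously trained model instead of
        # sitting in a blind spot while enough new data accumulates.
        return registry[product_id]
    return registry.get(product_id)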

    Level 2: Automating the CI/CD Pipeline

    level-2

The title of Level 2 is the automation of CI and CD. In DevOps, the focus of CI/CD is source code. So what is the focus of CI/CD in MLOps?

    In MLOps, the focus of CI/CD is also on source code, but more specifically, it can be seen as the training pipeline.

    Therefore, when it comes to training models, it is important to verify whether the model is trained correctly (CI) and whether the trained model functions properly (CD) in response to relevant changes that can impact the training process. Hence, CI/CD should be performed when there are direct modifications to the code used for training.

    In addition to code, the versions of the packages used and changes in the Python version are also part of CI/CD. In many cases, machine learning utilizes open-source packages. However, open-source packages can have changes in the internal logic of functions when their versions are updated. Although notifications may be provided when there are certain version updates, significant changes in versions can go unnoticed. Therefore, when the versions of the packages used change, it is important to perform CI/CD to ensure that the model is trained and functions correctly.
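As a hedged example of such a check (an assumption for illustration, not part of the referenced Google material), a CI job could run the training code on a small fixture dataset and verify that it still produces a working model whenever the code or a package version changes:

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC


def test_training_pipeline_still_produces_a_working_model():
    data = load_iris()
    x_train, x_test, y_train, y_test = train_test_split(
        data.data, data.target, random_state=0
    )
    clf = SVC(kernel="rbf")
    clf.fit(x_train, y_train)             # CI: training runs end to end
    accuracy = clf.score(x_test, y_test)  # CD: the trained model actually predicts
    assert accuracy > 0.8                 # loose sanity threshold, not a benchmark

Run under pytest on every code or dependency change, a test like this catches silent breakage introduced by package updates.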

    In summary, in MLOps, CI/CD focuses on the source code, particularly the training pipeline, to verify that the model is trained correctly and functions properly. This includes checking for direct code modifications and changes in package versions or Python versions to ensure the integrity of the training and functioning processes of the model.


    + \ No newline at end of file diff --git a/en/docs/introduction/why_kubernetes/index.html b/en/docs/introduction/why_kubernetes/index.html index 4346764d..18c3d499 100644 --- a/en/docs/introduction/why_kubernetes/index.html +++ b/en/docs/introduction/why_kubernetes/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    4. Why Kubernetes?

    MLOps & Kubernetes

    When talking about MLOps, why is the word Kubernetes always heard together?

To build a successful MLOps system, the various components described in Components of MLOps are needed, but operating them together at the infrastructure level raises many issues: for example, running a large number of machine learning model requests in order, reproducing the same execution environment in other workspaces, and responding quickly when a deployed service fails.

The need for containers and container orchestration systems appears here. With a container orchestration system such as Kubernetes, execution environments can be isolated and managed efficiently. When a few developers share a small number of clusters to develop and deploy machine learning models, introducing a container orchestration system makes it possible to prevent situations such as 'Is anyone using cluster 1?', 'Who killed my process that was using the GPU?', and 'Who updated package x on the cluster?'

    Container

What is a container, then? Microsoft defines a container as follows:

    Container: Standardized, portable packaging of an application's code, libraries, and configuration files

But why is a container needed for machine learning? Machine learning models can behave differently depending on the operating system, Python runtime, package versions, and so on. Containerization is the technology of packaging the source code used in machine learning together with its entire dependent execution environment so that both can be shared and executed as one unit. This packaged form is called a container image, and by sharing the container image, users can reproduce the same execution results on any system. In other words, by sharing not just the Jupyter Notebook file or the model's source code and requirements.txt, but the entire container image including the execution environment, you can avoid situations like "It works on my notebook, why not on yours?".

One common misunderstanding among people who are new to containers is to assume that "container == Docker". Docker is not synonymous with containers; rather, it is a tool that makes containers easier and more flexible to use, for example by launching containers and by creating and sharing container images. In summary, a container is a virtualization technology, and Docker is one implementation of that technology.

However, among the various container virtualization tools, Docker quickly became mainstream thanks to its ease of use and efficiency, so when people think of containers they often think of Docker automatically. There are various reasons why the container and Docker ecosystem became mainstream, but the technical details are outside the scope of MLOps for ALL.

    Container Orchestration System

    Then what is a container orchestration system? As inferred from the word "orchestration," it can be compared to a system that coordinates the operation of numerous containers to work together harmoniously.

    In container-based systems, services are provided to users in the form of containers. If the number of containers to be managed is small, a single operator can sufficiently handle all situations. However, if there are hundreds of containers running in dozens of clusters and they need to function continuously without causing any failures, it becomes nearly impossible for a single operator to monitor the proper functioning of all services and respond to issues.

    For example, continuous monitoring is required to ensure that all services are functioning properly. If a specific service experiences a failure, the operator needs to investigate the problem by examining the logs of multiple containers. Additionally, they need to handle various tasks such as scheduling and load balancing to prevent work overload on specific clusters or containers, as well as scaling operations.

    A container orchestration system is software that provides functionality to manage and operate the states of numerous containers continuously and automatically, making the process of managing and operating a large number of containers somewhat easier.

    How can it be used in machine learning? For example, a container that packages deep learning training code that requires a GPU can be executed on a cluster with available GPUs. A container that packages data preprocessing code requiring a large amount of memory can be executed on a cluster with ample memory. If there is an issue with the cluster during training, the system can automatically move the same container to a different cluster and continue the training, eliminating the need for manual intervention. Developing such a system that automates management without requiring manual intervention is the goal.
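For instance, here is a hedged sketch using the kubernetes Python client (assumed to be installed; the image name, resource amounts, and namespace are illustrative) that asks the orchestrator to place a GPU training container on whichever node has a free GPU:

from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() inside a cluster

pod = client.V1Pod(
    metadata=client.V1ObjectMeta(name="gpu-training-job"),
    spec=client.V1PodSpec(
        restart_policy="Never",
        containers=[
            client.V1Container(
                name="trainer",
                image="ghcr.io/example/train:latest",  # hypothetical training image
                resources=client.V1ResourceRequirements(
                    limits={"nvidia.com/gpu": "1", "memory": "8Gi"},
                ),
            )
        ],
    ),
)
# The scheduler, not the user, decides which cluster node has a free GPU.
client.CoreV1Api().create_namespaced_pod(namespace="default", body=pod)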

    As of the writing of this text in 2022, Kubernetes is considered the de facto standard for container orchestration systems.

    According to the survey released by CNCF in 2018, Kubernetes was already showing its prominence. The survey published in 2019 indicates that 78% of respondents were using Kubernetes at a production level.

    k8s-graph

    The growth of the Kubernetes ecosystem can be attributed to various reasons. However, similar to Docker, Kubernetes is not exclusively limited to machine learning-based services. Since delving into detailed technical content would require a substantial amount of discussion, this edition of "MLOps for ALL" will omit the detailed explanation of Kubernetes.

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/experiments-and-others/index.html b/en/docs/kubeflow-dashboard-guide/experiments-and-others/index.html index 9f181e09..322207e3 100644 --- a/en/docs/kubeflow-dashboard-guide/experiments-and-others/index.html +++ b/en/docs/kubeflow-dashboard-guide/experiments-and-others/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

6. Kubeflow Pipelines Related

In the left tabs of the Central Dashboard (Experiments (KFP), Pipelines, Runs, Recurring Runs, Artifacts, Executions), you can manage Kubeflow Pipelines and the results of pipeline executions and runs.

    left-tabs

Kubeflow Pipelines are the main reason for using Kubeflow in MLOps for ALL. Details on how to create, execute, and check the results of Kubeflow Pipelines can be found in 3. Kubeflow.

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/experiments/index.html b/en/docs/kubeflow-dashboard-guide/experiments/index.html index 380ee3ff..75d86e0f 100644 --- a/en/docs/kubeflow-dashboard-guide/experiments/index.html +++ b/en/docs/kubeflow-dashboard-guide/experiments/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    5. Experiments(AutoML)

    Next, we will click the Experiments(AutoML) tab on the left of the Central Dashboard.

    left-tabs

    automl

    The Experiments(AutoML) page is where you can manage Katib, which is responsible for AutoML through Hyperparameter Tuning and Neural Architecture Search in Kubeflow.

The usage of Katib and Experiments(AutoML) is not covered in MLOps for ALL v1.0 and will be added in v2.0.

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/intro/index.html b/en/docs/kubeflow-dashboard-guide/intro/index.html index 5a523e4d..33213353 100644 --- a/en/docs/kubeflow-dashboard-guide/intro/index.html +++ b/en/docs/kubeflow-dashboard-guide/intro/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    1. Central Dashboard

    Once you have completed Kubeflow installation, you can access the dashboard through the following command.

    kubectl port-forward --address 0.0.0.0 svc/istio-ingressgateway -n istio-system 8080:80

    after-login

The Central Dashboard is a UI that integrates all the features provided by Kubeflow. These features can be grouped by the tabs on the left side:

    left-tabs

    • Home
    • Notebooks
    • Tensorboards
    • Volumes
    • Models
    • Experiments(AutoML)
    • Experiments(KFP)
    • Pipelines
    • Runs
    • Recurring Runs
    • Artifacts
    • Executions

    Let's now look at the simple usage of each feature.

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/notebooks/index.html b/en/docs/kubeflow-dashboard-guide/notebooks/index.html index 20e5e5d5..0b102f3a 100644 --- a/en/docs/kubeflow-dashboard-guide/notebooks/index.html +++ b/en/docs/kubeflow-dashboard-guide/notebooks/index.html @@ -7,15 +7,15 @@ - +
    Version: Next

    2. Notebooks

    Launch Notebook Server

    Click on the Notebooks tab on the left side of the Central Dashboard.

    left-tabs

    You will see a similar screen.

The Notebooks tab is a page where users can independently create and access Jupyter Notebook and code-server environments (hereinafter referred to as a notebook server).

    notebook-home

    Click the "+ NEW NOTEBOOK" button at the top right.

    new-notebook

When the screen shown below appears, specify the spec of the notebook server to be created.

    create

Details of each spec field:
    • name:
      • Specifies a name to identify the notebook server.
    • namespace:
      • Cannot be changed. (It is automatically set to the namespace of the currently logged-in user account.)
    • Image:
      • Selects the image to use from pre-installed JupyterLab images with Python packages like sklearn, pytorch, tensorflow, etc.
        • If you want to use an image that utilizes GPU within the notebook server, refer to the GPUs section below.
      • If you want to use a custom notebook server that includes additional packages or source code, you can create a custom image and deploy it for use.
    • CPU / RAM:
      • Specifies the amount of resources required.
        • cpu: in core units
          • Represents the number of virtual cores, and can also be specified as a float value such as 1.5, 2.7, etc.
        • memory: in Gi units
    • GPUs:
      • Specifies the number of GPUs to allocate to the Jupyter notebook.
        • None
          • When GPU resources are not required.
        • 1, 2, 4
          • Allocates 1, 2, or 4 GPUs.
      • GPU Vendor:
        • If you have followed the (Optional) Setup GPU guide and installed the NVIDIA GPU plugin, select NVIDIA.
    • Workspace Volume:
      • Specifies the amount of disk space required within the notebook server.
      • Do not change the Type and Name fields unless you want to increase the disk space or change the AccessMode.
        • Check the "Don't use Persistent Storage for User's home" checkbox only if it is not necessary to save the notebook server's work. It is generally recommended not to check this option.
        • If you want to use a pre-existing Persistent Volume Claim (PVC), select Type as "Existing" and enter the name of the PVC to use.
    • Data Volumes:
      • If additional storage resources are required, click the "+ ADD VOLUME" button to create them.
    • Configurations, Affinity/Tolerations, Miscellaneous Settings
      • These are generally not needed, so detailed explanations are omitted in MLOps for All.

If you followed the (Optional) Setup GPU guide and installed the NVIDIA GPU plugin, select NVIDIA as the GPU vendor.

    creating

After creation, the Status will change to a green check mark icon, and the CONNECT button will be activated.

created


    Accessing the Notebook Server

    Clicking the CONNECT button will open a new browser window, where you will see the following screen:

    notebook-access

    You can use the Notebook, Console, and Terminal icons in the Launcher to start using them.

    Notebook Interface

    notebook-console

    Terminal Interface

    terminal-console


    Stopping the Notebook Server

    If you haven't used the notebook server for an extended period of time, you can stop it to optimize resource usage in the Kubernetes cluster. Note that stopping the notebook server will result in the deletion of all data stored outside the Workspace Volume or Data Volume specified when creating the notebook server.
If you haven't changed the path during notebook server creation, the default Workspace Volume path is /home/jovyan inside the notebook server, so any data stored outside the /home/jovyan directory will be deleted.

    Clicking the STOP button as shown below will stop the notebook server:

    notebook-stop

    Once the server is stopped, the CONNECT button will be disabled. To restart the notebook server and use it again, click the PLAY button.

    notebook-restart

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/tensorboards/index.html b/en/docs/kubeflow-dashboard-guide/tensorboards/index.html index 69a29492..7f3fbcdb 100644 --- a/en/docs/kubeflow-dashboard-guide/tensorboards/index.html +++ b/en/docs/kubeflow-dashboard-guide/tensorboards/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    3. Tensorboards

    Let's click on the Tensorboards tab of the left tabs of the Central Dashboard next.

    left-tabs

    We can see the following screen.

    tensorboard

The TensorBoard server created in this way can be used just like a regular remote TensorBoard server, or it can store data directly from a Kubeflow Pipeline run for visualization.

    You can refer to the TensorBoard documentation for more information on using TensorBoard with Kubeflow Pipeline runs.

    There are various ways to visualize the results of Kubeflow Pipeline runs, and in MLOps for ALL, we will utilize the Visualization feature of Kubeflow components and the visualization capabilities of MLflow to enable more general use cases. Therefore, detailed explanations of the TensorBoards page will be omitted in this context.

    + \ No newline at end of file diff --git a/en/docs/kubeflow-dashboard-guide/volumes/index.html b/en/docs/kubeflow-dashboard-guide/volumes/index.html index 1ae0c046..245aa02c 100644 --- a/en/docs/kubeflow-dashboard-guide/volumes/index.html +++ b/en/docs/kubeflow-dashboard-guide/volumes/index.html @@ -7,15 +7,15 @@ - +
    Version: Next

    4. Volumes

    Volumes

    Next, let's click on the Volumes tab in the left of the Central Dashboard.

    left-tabs

    You will see the following screen.

    volumes

The Volumes tab provides the functionality to manage the Kubernetes volumes, that is, the Persistent Volume Claims (PVCs), belonging to the current user's namespace.

    By looking at the screenshot, you can see the information of the Volume created on the 1. Notebooks page. It can be seen that the Storage Class of the Volume is set to local-path, which is the Default Storage Class installed at the time of Kubernetes cluster installation.

    In addition, the Volumes page can be used if you want to create, view, or delete a new Volume in the user namespace.


    Creating a Volume

    By clicking the + NEW VOLUME button at the top right, you can see the following screen.

    new-volume

    You can create a volume by specifying its name, size, storage class, and access mode.
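The same request can also be made outside the UI. Below is a hedged sketch using the kubernetes Python client (assumed to be installed; the volume name, size, and namespace are illustrative) that creates the underlying PersistentVolumeClaim directly:

from kubernetes import client, config

config.load_kube_config()

pvc = client.V1PersistentVolumeClaim(
    metadata=client.V1ObjectMeta(name="my-data-volume"),
    spec=client.V1PersistentVolumeClaimSpec(
        access_modes=["ReadWriteOnce"],
        storage_class_name="local-path",
        resources=client.V1ResourceRequirements(requests={"storage": "5Gi"}),
    ),
)
client.CoreV1Api().create_namespaced_persistent_volume_claim(
    namespace="kubeflow-user-example-com",  # replace with your user namespace
    body=pvc,
)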

When you specify the desired specs and create a volume, its Status will be shown as Pending on this page. If you hover over the Status icon, you will see the message "This volume will be bound when its first consumer is created."
This is due to the volume binding policy of the local-path StorageClass used in this tutorial, and it is not a problem.
Even while the Status is shown as Pending, you can still reference the volume by name from the notebook server or pod that will use it, and the actual volume creation will be triggered at that point.

    creating-volume

    + \ No newline at end of file diff --git a/en/docs/kubeflow/advanced-component/index.html b/en/docs/kubeflow/advanced-component/index.html index 488af63c..d4d1204e 100644 --- a/en/docs/kubeflow/advanced-component/index.html +++ b/en/docs/kubeflow/advanced-component/index.html @@ -7,7 +7,7 @@ - + @@ -19,8 +19,8 @@ All the _path suffixes have disappeared from the arguments received in the input and output.
    We can see that instead of accessing iris_data.outputs["data_path"], we are accessing iris_data.outputs["data"].
    This happens because Kubeflow has a rule that paths created with InputPath and OutputPath can be accessed without the _path suffix when accessed from the pipeline.

    However, if you upload the pipeline just written, it will not run.
The reason is explained on the next page.

    + \ No newline at end of file diff --git a/en/docs/kubeflow/advanced-environment/index.html b/en/docs/kubeflow/advanced-environment/index.html index 92596b02..0c5715ce 100644 --- a/en/docs/kubeflow/advanced-environment/index.html +++ b/en/docs/kubeflow/advanced-environment/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ Kubeflow uses Kubernetes, so the component wrapper runs the component content on its own separate container.

    In detail, the image specified in the generated train_from_csv.yaml is image: python:3.7.

Some readers may already have noticed why it will not run.

    The python:3.7 image does not have the packages we want to use, such as dill, pandas, and sklearn, installed.
    Therefore, when executing, it fails with an error indicating that the packages are not found.

    So, how can we add the packages?

    Adding packages

When converting a function into a Kubeflow component, there are two ways to add packages:

    1. Using base_image
    2. Using package_to_install

Let's check what arguments create_component_from_func, the function used to compile components, can receive.

def create_component_from_func(
    func: Callable,
    output_component_file: Optional[str] = None,
    base_image: Optional[str] = None,
    packages_to_install: List[str] = None,
    annotations: Optional[Mapping[str, str]] = None,
):
    • func: Function that creates the component wrapper to be made into a component.
    • base_image: Image that the component wrapper will run on.
    • packages_to_install: Additional packages that need to be installed for the component to use.

    1. base_image

Looking more closely at the sequence in which a component is executed, it is as follows:

    1. docker pull base_image
    2. pip install packages_to_install
    3. run command

    If the base_image used by the component already has all the packages installed, you can use it without installing additional packages.

    For example, on this page we are going to write a Dockerfile like this:

    FROM python:3.7

    RUN pip install dill pandas scikit-learn

Let's build the image using the Dockerfile above. The container registry we will use in this practice is GHCR (GitHub Container Registry).
You can choose a container registry that suits your environment and push the image there.

    docker build . -f Dockerfile -t ghcr.io/mlops-for-all/base-image
    docker push ghcr.io/mlops-for-all/base-image

    Now let's try inputting the base image.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    base_image="ghcr.io/mlops-for-all/base-image:latest",
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)


if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

    If you compile the generated component, it will appear as follows.

name: Train from csv
inputs:
- {name: train_data, type: csv}
- {name: train_target, type: csv}
- {name: kernel, type: String}
outputs:
- {name: model, type: dill}
implementation:
  container:
    image: ghcr.io/mlops-for-all/base-image:latest
    command:
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def _make_parent_dirs_and_return_path(file_path: str):
          import os
          os.makedirs(os.path.dirname(file_path), exist_ok=True)
          return file_path

      def train_from_csv(
          train_data_path,
          train_target_path,
          model_path,
          kernel,
      ):
          import dill
          import pandas as pd

          from sklearn.svm import SVC

          train_data = pd.read_csv(train_data_path)
          train_target = pd.read_csv(train_target_path)

          clf = SVC(kernel=kernel)
          clf.fit(train_data, train_target)

          with open(model_path, mode="wb") as file_writer:
              dill.dump(clf, file_writer)

      import argparse
      _parser = argparse.ArgumentParser(prog='Train from csv', description='')
      _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
      _parsed_args = vars(_parser.parse_args())

      _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

    We can confirm that the base_image has been changed to the value we have set.

    2. packages_to_install

However, building a new Docker image every time packages are added takes a lot of time. In this case, we can use the packages_to_install argument to easily add packages to the container.

from functools import partial
from kfp.components import InputPath, OutputPath, create_component_from_func


@partial(
    create_component_from_func,
    packages_to_install=["dill==0.3.4", "pandas==1.3.4", "scikit-learn==1.0.1"],
)
def train_from_csv(
    train_data_path: InputPath("csv"),
    train_target_path: InputPath("csv"),
    model_path: OutputPath("dill"),
    kernel: str,
):
    import dill
    import pandas as pd

    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)


if __name__ == "__main__":
    train_from_csv.component_spec.save("train_from_csv.yaml")

    If you execute the script, the train_from_csv.yaml file will be generated.

name: Train from csv
inputs:
- {name: train_data, type: csv}
- {name: train_target, type: csv}
- {name: kernel, type: String}
outputs:
- {name: model, type: dill}
implementation:
  container:
    image: python:3.7
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
      'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
      python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
      'scikit-learn==1.0.1' --user) && "$0" "$@"
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def _make_parent_dirs_and_return_path(file_path: str):
          import os
          os.makedirs(os.path.dirname(file_path), exist_ok=True)
          return file_path

      def train_from_csv(
          train_data_path,
          train_target_path,
          model_path,
          kernel,
      ):
          import dill
          import pandas as pd

          from sklearn.svm import SVC

          train_data = pd.read_csv(train_data_path)
          train_target = pd.read_csv(train_target_path)

          clf = SVC(kernel=kernel)
          clf.fit(train_data, train_target)

          with open(model_path, mode="wb") as file_writer:
              dill.dump(clf, file_writer)

      import argparse
      _parser = argparse.ArgumentParser(prog='Train from csv', description='')
      _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
      _parsed_args = vars(_parser.parse_args())

      _outputs = train_from_csv(**_parsed_args)
    args:
    - --train-data
    - {inputPath: train_data}
    - --train-target
    - {inputPath: train_target}
    - --kernel
    - {inputValue: kernel}
    - --model
    - {outputPath: model}

    If we take a closer look at the order in which the components written above are executed, it looks like this:

    1. docker pull python:3.7
    2. pip install dill==0.3.4 pandas==1.3.4 scikit-learn==1.0.1
    3. run command

    When the generated yaml file is closely examined, the following lines are automatically added, so that the necessary packages are installed and the program runs smoothly without errors.

    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
      'dill==0.3.4' 'pandas==1.3.4' 'scikit-learn==1.0.1' || PIP_DISABLE_PIP_VERSION_CHECK=1
      python3 -m pip install --quiet --no-warn-script-location 'dill==0.3.4' 'pandas==1.3.4'
      'scikit-learn==1.0.1' --user) && "$0" "$@"

    At this time, configure the MLflow endpoint used for the upload so that it connects to the MLflow service that we installed.
    In this case, use the Kubernetes Service DNS name of the MinIO that was set up when the MLflow server was installed. Since that service was created in the kubeflow namespace with the name minio-service, set it to http://minio-service.kubeflow.svc:9000.
    Similarly, for the tracking_uri address, use the Kubernetes Service DNS name of the MLflow server and set it to http://mlflow-server-service.mlflow-system.svc:5000.

    from functools import partial
    from kfp.components import InputPath, create_component_from_func

    @partial(
        create_component_from_func,
        packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
    )
    def upload_sklearn_model_to_mlflow(
        model_name: str,
        model_path: InputPath("dill"),
        input_example_path: InputPath("dill"),
        signature_path: InputPath("dill"),
        conda_env_path: InputPath("dill"),
    ):
        import os
        import dill
        from mlflow.sklearn import save_model

        from mlflow.tracking.client import MlflowClient

        os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
        os.environ["AWS_ACCESS_KEY_ID"] = "minio"
        os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

        client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

        with open(model_path, mode="rb") as file_reader:
            clf = dill.load(file_reader)

        with open(input_example_path, "rb") as file_reader:
            input_example = dill.load(file_reader)

        with open(signature_path, "rb") as file_reader:
            signature = dill.load(file_reader)

        with open(conda_env_path, "rb") as file_reader:
            conda_env = dill.load(file_reader)

        save_model(
            sk_model=clf,
            path=model_name,
            serialization_format="cloudpickle",
            conda_env=conda_env,
            signature=signature,
            input_example=input_example,
        )
        run = client.create_run(experiment_id="0")
        client.log_artifact(run.info.run_id, model_name)
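
    The component above always records its run under experiment_id="0", MLflow's default experiment. If you would rather group runs under a named experiment, a minimal sketch of the change is shown below; the experiment name iris-svc is only a placeholder, and everything else in the component stays the same.

        # Sketch: replace the last two lines of the component body with the following.
        # "iris-svc" is a hypothetical experiment name; pick any name you like.
        experiment = client.get_experiment_by_name("iris-svc")
        if experiment is not None:
            experiment_id = experiment.experiment_id
        else:
            experiment_id = client.create_experiment("iris-svc")

        run = client.create_run(experiment_id=experiment_id)
        client.log_artifact(run.info.run_id, model_name)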

    MLFlow Pipeline

    Now let's connect the components we have written and create a pipeline.

    Data Component

    The data we will use to train the model is sklearn's iris. We will write a component to generate the data.

    from functools import partial

    from kfp.components import InputPath, OutputPath, create_component_from_func


    @partial(
        create_component_from_func,
        packages_to_install=["pandas", "scikit-learn"],
    )
    def load_iris_data(
        data_path: OutputPath("csv"),
        target_path: OutputPath("csv"),
    ):
        import pandas as pd
        from sklearn.datasets import load_iris

        iris = load_iris()

        data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
        target = pd.DataFrame(iris["target"], columns=["target"])

        data.to_csv(data_path, index=False)
        target.to_csv(target_path, index=False)

    Pipeline

    The pipeline code can be written as follows.

    from kfp.dsl import pipeline


    @pipeline(name="mlflow_pipeline")
    def mlflow_pipeline(kernel: str, model_name: str):
        iris_data = load_iris_data()
        model = train_from_csv(
            train_data=iris_data.outputs["data"],
            train_target=iris_data.outputs["target"],
            kernel=kernel,
        )
        _ = upload_sklearn_model_to_mlflow(
            model_name=model_name,
            model=model.outputs["model"],
            input_example=model.outputs["input_example"],
            signature=model.outputs["signature"],
            conda_env=model.outputs["conda_env"],
        )

    Run

    If you organize the components and the pipeline written above into a single Python file, it looks like this.

    from functools import partial

    import kfp
    from kfp.components import InputPath, OutputPath, create_component_from_func
    from kfp.dsl import pipeline


    @partial(
        create_component_from_func,
        packages_to_install=["pandas", "scikit-learn"],
    )
    def load_iris_data(
        data_path: OutputPath("csv"),
        target_path: OutputPath("csv"),
    ):
        import pandas as pd
        from sklearn.datasets import load_iris

        iris = load_iris()

        data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
        target = pd.DataFrame(iris["target"], columns=["target"])

        data.to_csv(data_path, index=False)
        target.to_csv(target_path, index=False)


    @partial(
        create_component_from_func,
        packages_to_install=["dill", "pandas", "scikit-learn", "mlflow"],
    )
    def train_from_csv(
        train_data_path: InputPath("csv"),
        train_target_path: InputPath("csv"),
        model_path: OutputPath("dill"),
        input_example_path: OutputPath("dill"),
        signature_path: OutputPath("dill"),
        conda_env_path: OutputPath("dill"),
        kernel: str,
    ):
        import dill
        import pandas as pd
        from sklearn.svm import SVC

        from mlflow.models.signature import infer_signature
        from mlflow.utils.environment import _mlflow_conda_env

        train_data = pd.read_csv(train_data_path)
        train_target = pd.read_csv(train_target_path)

        clf = SVC(kernel=kernel)
        clf.fit(train_data, train_target)

        with open(model_path, mode="wb") as file_writer:
            dill.dump(clf, file_writer)

        input_example = train_data.sample(1)
        with open(input_example_path, "wb") as file_writer:
            dill.dump(input_example, file_writer)

        signature = infer_signature(train_data, clf.predict(train_data))
        with open(signature_path, "wb") as file_writer:
            dill.dump(signature, file_writer)

        conda_env = _mlflow_conda_env(
            additional_pip_deps=["dill", "pandas", "scikit-learn"]
        )
        with open(conda_env_path, "wb") as file_writer:
            dill.dump(conda_env, file_writer)


    @partial(
        create_component_from_func,
        packages_to_install=["dill", "pandas", "scikit-learn", "mlflow", "boto3"],
    )
    def upload_sklearn_model_to_mlflow(
        model_name: str,
        model_path: InputPath("dill"),
        input_example_path: InputPath("dill"),
        signature_path: InputPath("dill"),
        conda_env_path: InputPath("dill"),
    ):
        import os
        import dill
        from mlflow.sklearn import save_model

        from mlflow.tracking.client import MlflowClient

        os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
        os.environ["AWS_ACCESS_KEY_ID"] = "minio"
        os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

        client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

        with open(model_path, mode="rb") as file_reader:
            clf = dill.load(file_reader)

        with open(input_example_path, "rb") as file_reader:
            input_example = dill.load(file_reader)

        with open(signature_path, "rb") as file_reader:
            signature = dill.load(file_reader)

        with open(conda_env_path, "rb") as file_reader:
            conda_env = dill.load(file_reader)

        save_model(
            sk_model=clf,
            path=model_name,
            serialization_format="cloudpickle",
            conda_env=conda_env,
            signature=signature,
            input_example=input_example,
        )
        run = client.create_run(experiment_id="0")
        client.log_artifact(run.info.run_id, model_name)


    @pipeline(name="mlflow_pipeline")
    def mlflow_pipeline(kernel: str, model_name: str):
        iris_data = load_iris_data()
        model = train_from_csv(
            train_data=iris_data.outputs["data"],
            train_target=iris_data.outputs["target"],
            kernel=kernel,
        )
        _ = upload_sklearn_model_to_mlflow(
            model_name=model_name,
            model=model.outputs["model"],
            input_example=model.outputs["input_example"],
            signature=model.outputs["signature"],
            conda_env=model.outputs["conda_env"],
        )


    if __name__ == "__main__":
        kfp.compiler.Compiler().compile(mlflow_pipeline, "mlflow_pipeline.yaml")
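
    Assuming the script is saved as mlflow_pipeline.py (the file name is an arbitrary choice), running it once is enough to produce the compiled workflow shown below.

    python mlflow_pipeline.py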

    mlflow_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: mlflow-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-19T14:14:11.999807',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "kernel", "type":
    "String"}, {"name": "model_name", "type": "String"}], "name": "mlflow_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.10}
    spec:
    entrypoint: mlflow-pipeline
    templates:
    - name: load-iris-data
    container:
    args: [--data, /tmp/outputs/data/data, --target, /tmp/outputs/target/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location 'pandas' 'scikit-learn' --user)
    && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def load_iris_data(
    data_path,
    target_path,
    ):
    import pandas as pd
    from sklearn.datasets import load_iris

    iris = load_iris()

    data = pd.DataFrame(iris["data"], columns=iris["feature_names"])
    target = pd.DataFrame(iris["target"], columns=["target"])

    data.to_csv(data_path, index=False)
    target.to_csv(target_path, index=False)

    import argparse
    _parser = argparse.ArgumentParser(prog='Load iris data', description='')
    _parser.add_argument("--data", dest="data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--target", dest="target_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = load_iris_data(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: load-iris-data-data, path: /tmp/outputs/data/data}
    - {name: load-iris-data-target, path: /tmp/outputs/target/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--data", {"outputPath": "data"}, "--target", {"outputPath": "target"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''pandas'' ''scikit-learn'' ||
    PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    ''pandas'' ''scikit-learn'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef load_iris_data(\n data_path,\n target_path,\n):\n import
    pandas as pd\n from sklearn.datasets import load_iris\n\n iris = load_iris()\n\n data
    = pd.DataFrame(iris[\"data\"], columns=iris[\"feature_names\"])\n target
    = pd.DataFrame(iris[\"target\"], columns=[\"target\"])\n\n data.to_csv(data_path,
    index=False)\n target.to_csv(target_path, index=False)\n\nimport argparse\n_parser
    = argparse.ArgumentParser(prog=''Load iris data'', description='''')\n_parser.add_argument(\"--data\",
    dest=\"data_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--target\", dest=\"target_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = load_iris_data(**_parsed_args)\n"],
    "image": "python:3.7"}}, "name": "Load iris data", "outputs": [{"name":
    "data", "type": "csv"}, {"name": "target", "type": "csv"}]}', pipelines.kubeflow.org/component_ref: '{}'}
    - name: mlflow-pipeline
    inputs:
    parameters:
    - {name: kernel}
    - {name: model_name}
    dag:
    tasks:
    - {name: load-iris-data, template: load-iris-data}
    - name: train-from-csv
    template: train-from-csv
    dependencies: [load-iris-data]
    arguments:
    parameters:
    - {name: kernel, value: '{{inputs.parameters.kernel}}'}
    artifacts:
    - {name: load-iris-data-data, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-data}}'}
    - {name: load-iris-data-target, from: '{{tasks.load-iris-data.outputs.artifacts.load-iris-data-target}}'}
    - name: upload-sklearn-model-to-mlflow
    template: upload-sklearn-model-to-mlflow
    dependencies: [train-from-csv]
    arguments:
    parameters:
    - {name: model_name, value: '{{inputs.parameters.model_name}}'}
    artifacts:
    - {name: train-from-csv-conda_env, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-conda_env}}'}
    - {name: train-from-csv-input_example, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-input_example}}'}
    - {name: train-from-csv-model, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-model}}'}
    - {name: train-from-csv-signature, from: '{{tasks.train-from-csv.outputs.artifacts.train-from-csv-signature}}'}
    - name: train-from-csv
    container:
    args: [--train-data, /tmp/inputs/train_data/data, --train-target, /tmp/inputs/train_target/data,
    --kernel, '{{inputs.parameters.kernel}}', --model, /tmp/outputs/model/data,
    --input-example, /tmp/outputs/input_example/data, --signature, /tmp/outputs/signature/data,
    --conda-env, /tmp/outputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path

    def train_from_csv(
    train_data_path,
    train_target_path,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    kernel,
    ):
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    from mlflow.models.signature import infer_signature
    from mlflow.utils.environment import _mlflow_conda_env

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
    dill.dump(clf, file_writer)

    input_example = train_data.sample(1)
    with open(input_example_path, "wb") as file_writer:
    dill.dump(input_example, file_writer)

    signature = infer_signature(train_data, clf.predict(train_data))
    with open(signature_path, "wb") as file_writer:
    dill.dump(signature, file_writer)

    conda_env = _mlflow_conda_env(
    additional_pip_deps=["dill", "pandas", "scikit-learn"]
    )
    with open(conda_env_path, "wb") as file_writer:
    dill.dump(conda_env, file_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Train from csv', description='')
    _parser.add_argument("--train-data", dest="train_data_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--train-target", dest="train_target_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--kernel", dest="kernel", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = train_from_csv(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: kernel}
    artifacts:
    - {name: load-iris-data-data, path: /tmp/inputs/train_data/data}
    - {name: load-iris-data-target, path: /tmp/inputs/train_target/data}
    outputs:
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/outputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/outputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/outputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/outputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--train-data", {"inputPath": "train_data"}, "--train-target",
    {"inputPath": "train_target"}, "--kernel", {"inputValue": "kernel"}, "--model",
    {"outputPath": "model"}, "--input-example", {"outputPath": "input_example"},
    "--signature", {"outputPath": "signature"}, "--conda-env", {"outputPath":
    "conda_env"}], "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''dill'' ''pandas''
    ''scikit-learn'' ''mlflow'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
    pip install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef train_from_csv(\n train_data_path,\n train_target_path,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n kernel,\n):\n import
    dill\n import pandas as pd\n from sklearn.svm import SVC\n\n from
    mlflow.models.signature import infer_signature\n from mlflow.utils.environment
    import _mlflow_conda_env\n\n train_data = pd.read_csv(train_data_path)\n train_target
    = pd.read_csv(train_target_path)\n\n clf = SVC(kernel=kernel)\n clf.fit(train_data,
    train_target)\n\n with open(model_path, mode=\"wb\") as file_writer:\n dill.dump(clf,
    file_writer)\n\n input_example = train_data.sample(1)\n with open(input_example_path,
    \"wb\") as file_writer:\n dill.dump(input_example, file_writer)\n\n signature
    = infer_signature(train_data, clf.predict(train_data))\n with open(signature_path,
    \"wb\") as file_writer:\n dill.dump(signature, file_writer)\n\n conda_env
    = _mlflow_conda_env(\n additional_pip_deps=[\"dill\", \"pandas\",
    \"scikit-learn\"]\n )\n with open(conda_env_path, \"wb\") as file_writer:\n dill.dump(conda_env,
    file_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Train
    from csv'', description='''')\n_parser.add_argument(\"--train-data\", dest=\"train_data_path\",
    type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--train-target\",
    dest=\"train_target_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--kernel\",
    dest=\"kernel\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\", dest=\"input_example_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=_make_parent_dirs_and_return_path, required=True,
    default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\", dest=\"conda_env_path\",
    type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = train_from_csv(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "train_data", "type": "csv"},
    {"name": "train_target", "type": "csv"}, {"name": "kernel", "type": "String"}],
    "name": "Train from csv", "outputs": [{"name": "model", "type": "dill"},
    {"name": "input_example", "type": "dill"}, {"name": "signature", "type":
    "dill"}, {"name": "conda_env", "type": "dill"}]}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"kernel": "{{inputs.parameters.kernel}}"}'}
    - name: upload-sklearn-model-to-mlflow
    container:
    args: [--model-name, '{{inputs.parameters.model_name}}', --model, /tmp/inputs/model/data,
    --input-example, /tmp/inputs/input_example/data, --signature, /tmp/inputs/signature/data,
    --conda-env, /tmp/inputs/conda_env/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'dill' 'pandas' 'scikit-learn' 'mlflow' 'boto3' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location 'dill' 'pandas' 'scikit-learn'
    'mlflow' 'boto3' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def upload_sklearn_model_to_mlflow(
    model_name,
    model_path,
    input_example_path,
    signature_path,
    conda_env_path,
    ):
    import os
    import dill
    from mlflow.sklearn import save_model

    from mlflow.tracking.client import MlflowClient

    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    client = MlflowClient("http://mlflow-server-service.mlflow-system.svc:5000")

    with open(model_path, mode="rb") as file_reader:
    clf = dill.load(file_reader)

    with open(input_example_path, "rb") as file_reader:
    input_example = dill.load(file_reader)

    with open(signature_path, "rb") as file_reader:
    signature = dill.load(file_reader)

    with open(conda_env_path, "rb") as file_reader:
    conda_env = dill.load(file_reader)

    save_model(
    sk_model=clf,
    path=model_name,
    serialization_format="cloudpickle",
    conda_env=conda_env,
    signature=signature,
    input_example=input_example,
    )
    run = client.create_run(experiment_id="0")
    client.log_artifact(run.info.run_id, model_name)

    import argparse
    _parser = argparse.ArgumentParser(prog='Upload sklearn model to mlflow', description='')
    _parser.add_argument("--model-name", dest="model_name", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--input-example", dest="input_example_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--signature", dest="signature_path", type=str, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--conda-env", dest="conda_env_path", type=str, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = upload_sklearn_model_to_mlflow(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: model_name}
    artifacts:
    - {name: train-from-csv-conda_env, path: /tmp/inputs/conda_env/data}
    - {name: train-from-csv-input_example, path: /tmp/inputs/input_example/data}
    - {name: train-from-csv-model, path: /tmp/inputs/model/data}
    - {name: train-from-csv-signature, path: /tmp/inputs/signature/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.10
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--model-name", {"inputValue": "model_name"}, "--model", {"inputPath":
    "model"}, "--input-example", {"inputPath": "input_example"}, "--signature",
    {"inputPath": "signature"}, "--conda-env", {"inputPath": "conda_env"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn''
    ''mlflow'' ''boto3'' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install
    --quiet --no-warn-script-location ''dill'' ''pandas'' ''scikit-learn'' ''mlflow''
    ''boto3'' --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def upload_sklearn_model_to_mlflow(\n model_name,\n model_path,\n input_example_path,\n signature_path,\n conda_env_path,\n):\n import
    os\n import dill\n from mlflow.sklearn import save_model\n\n from
    mlflow.tracking.client import MlflowClient\n\n os.environ[\"MLFLOW_S3_ENDPOINT_URL\"]
    = \"http://minio-service.kubeflow.svc:9000\"\n os.environ[\"AWS_ACCESS_KEY_ID\"]
    = \"minio\"\n os.environ[\"AWS_SECRET_ACCESS_KEY\"] = \"minio123\"\n\n client
    = MlflowClient(\"http://mlflow-server-service.mlflow-system.svc:5000\")\n\n with
    open(model_path, mode=\"rb\") as file_reader:\n clf = dill.load(file_reader)\n\n with
    open(input_example_path, \"rb\") as file_reader:\n input_example
    = dill.load(file_reader)\n\n with open(signature_path, \"rb\") as file_reader:\n signature
    = dill.load(file_reader)\n\n with open(conda_env_path, \"rb\") as file_reader:\n conda_env
    = dill.load(file_reader)\n\n save_model(\n sk_model=clf,\n path=model_name,\n serialization_format=\"cloudpickle\",\n conda_env=conda_env,\n signature=signature,\n input_example=input_example,\n )\n run
    = client.create_run(experiment_id=\"0\")\n client.log_artifact(run.info.run_id,
    model_name)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Upload
    sklearn model to mlflow'', description='''')\n_parser.add_argument(\"--model-name\",
    dest=\"model_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\",
    dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--input-example\",
    dest=\"input_example_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--signature\",
    dest=\"signature_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--conda-env\",
    dest=\"conda_env_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = upload_sklearn_model_to_mlflow(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "model_name", "type": "String"},
    {"name": "model", "type": "dill"}, {"name": "input_example", "type": "dill"},
    {"name": "signature", "type": "dill"}, {"name": "conda_env", "type": "dill"}],
    "name": "Upload sklearn model to mlflow"}', pipelines.kubeflow.org/component_ref: '{}',
    pipelines.kubeflow.org/arguments.parameters: '{"model_name": "{{inputs.parameters.model_name}}"}'}
    arguments:
    parameters:
    - {name: kernel}
    - {name: model_name}
    serviceAccountName: pipeline-runner

    After running the script and generating the mlflow_pipeline.yaml file, upload the pipeline and run it to check the results.
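
    If you prefer to submit the run from the KFP SDK rather than the web UI, a minimal sketch is shown below. It assumes a kfp.Client() that can already reach your Kubeflow Pipelines endpoint (depending on your setup you may need to pass host and authentication details), and the kernel and model_name values are just examples.

    import kfp

    client = kfp.Client()  # may need host/auth arguments in a multi-user deployment
    client.create_run_from_pipeline_package(
        "mlflow_pipeline.yaml",
        arguments={"kernel": "rbf", "model_name": "svc"},  # example argument values
    )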

    mlflow-svc-0

    Port-forward the mlflow service to access the MLflow UI.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

    Open a web browser and go to localhost:5000. You will see that the run has been created, as shown below.

    mlflow-svc-1

    Click on the run to verify that the trained model file is present.

    mlflow-svc-2
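
    As an optional sanity check that is not part of the original steps, you can load the uploaded model back from MLflow and run a prediction. The sketch below assumes it is executed somewhere that can reach both the MLflow server and MinIO (inside the cluster, or through port-forwards with the addresses changed to localhost), that the run ID is copied from the MLflow UI, and that "svc" stands in for whatever model_name you used.

    import os

    from mlflow.sklearn import load_model

    # Assumed in-cluster endpoints; replace with localhost addresses when port-forwarding.
    os.environ["MLFLOW_TRACKING_URI"] = "http://mlflow-server-service.mlflow-system.svc:5000"
    os.environ["MLFLOW_S3_ENDPOINT_URL"] = "http://minio-service.kubeflow.svc:9000"
    os.environ["AWS_ACCESS_KEY_ID"] = "minio"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "minio123"

    run_id = "<run id from the MLflow UI>"  # placeholder
    model = load_model(f"runs:/{run_id}/svc")  # "svc" = the model_name used in the pipeline run
    print(model.predict([[5.1, 3.5, 1.4, 0.2]]))  # a single iris-like sample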

    If you need to train a model using a GPU but the Kubernetes environment does not allocate one, training may not run correctly.
    To address this, you can use the set_gpu_limit() attribute to set the GPU limit.

    import kfp
    from kfp.components import create_component_from_func
    from kfp.dsl import pipeline


    @create_component_from_func
    def print_and_return_number(number: int) -> int:
        print(number)
        return number


    @create_component_from_func
    def sum_and_print_numbers(number_1: int, number_2: int):
        print(number_1 + number_2)


    @pipeline(name="example_pipeline")
    def example_pipeline(number_1: int, number_2: int):
        number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
        number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
        sum_result = sum_and_print_numbers(
            number_1=number_1_result.output, number_2=number_2_result.output
        ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1)


    if __name__ == "__main__":
        kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

If you execute the above script and look closely at sum-and-print-numbers in the generated file, you can see that a resources field with {nvidia.com/gpu: 1} has been added. This is how the GPU is allocated.

  - name: sum-and-print-numbers
    container:
      args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
        '{{inputs.parameters.print-and-return-number-2-Output}}']
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def sum_and_print_numbers(number_1, number_2):
            print(number_1 + number_2)

        import argparse
        _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
        _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
        _parsed_args = vars(_parser.parse_args())

        _outputs = sum_and_print_numbers(**_parsed_args)
      image: python:3.7
      resources:
        limits: {nvidia.com/gpu: 1}

    CPU

The number of CPUs can be set using the .set_cpu_limit() method.
The difference from the GPU setting is that the input must be a string, not an int.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_cpu_limit("16")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

Only the changed part is shown below.

      resources:
        limits: {nvidia.com/gpu: 1, cpu: '16'}

    Memory

Memory can be set using the .set_memory_limit() method.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1).set_display_name("This is number 1")
    number_2_result = print_and_return_number(number_2).set_display_name("This is number 2")
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    ).set_display_name("This is sum of number 1 and number 2").set_gpu_limit(1).set_memory_limit("1G")


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

The changed part is as follows.

      resources:
        limits: {nvidia.com/gpu: 1, memory: 1G}
    + \ No newline at end of file diff --git a/en/docs/kubeflow/advanced-run/index.html b/en/docs/kubeflow/advanced-run/index.html index bcb350ea..903e9a87 100644 --- a/en/docs/kubeflow/advanced-run/index.html +++ b/en/docs/kubeflow/advanced-run/index.html @@ -7,7 +7,7 @@ - + @@ -16,8 +16,8 @@ Graph, Run Output, and Config.

    advanced-run-0.png

    Graph

    advanced-run-1.png

In the graph, if you click on a component of the run, you can check that component's execution information.

    Input/Output

    The Input/Output tab allows you to view and download the Configurations, Input, and Output Artifacts used in the components.

    Logs

In the Logs tab, you can view all the stdout output generated during the execution of the Python code. However, pods are deleted after a certain period of time, so the logs may no longer be visible in this tab. In that case, you can check them in the main-logs entry of the Output artifacts.

    Visualizations

    The Visualizations tab displays plots generated by the components.

To generate a plot, save the desired values to an output argument declared as mlpipeline_ui_metadata: OutputPath("UI_Metadata"). The plot must be provided as HTML. The conversion process is as follows.


from functools import partial

from kfp.components import create_component_from_func, OutputPath


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(
    mlpipeline_ui_metadata: OutputPath("UI_Metadata")
):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)

Written as a full pipeline, it looks like this.

from functools import partial

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@partial(
    create_component_from_func,
    packages_to_install=["matplotlib"],
)
def plot_linear(mlpipeline_ui_metadata: OutputPath("UI_Metadata")):
    import base64
    import json
    from io import BytesIO

    import matplotlib.pyplot as plt

    plt.plot([1, 2, 3], [1, 2, 3])

    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")

    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
        "outputs": [
            {
                "type": "web-app",
                "storage": "inline",
                "source": html,
            },
        ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
        json.dump(metadata, html_writer)


@pipeline(name="plot_pipeline")
def plot_pipeline():
    plot_linear()


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(plot_pipeline, "plot_pipeline.yaml")

    If you run this script and check the resulting plot_pipeline.yaml, you will see the following.

    plot_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: plot-pipeline-
annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9, pipelines.kubeflow.org/pipeline_compilation_time: '2022-01-17T13:31:32.963214',
pipelines.kubeflow.org/pipeline_spec: '{"name": "plot_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.8.9}
    spec:
    entrypoint: plot-pipeline
    templates:
    - name: plot-linear
    container:
    args: [--mlpipeline-ui-metadata, /tmp/outputs/mlpipeline_ui_metadata/data]
    command:
    - sh
    - -c
    - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
    'matplotlib' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet
    --no-warn-script-location 'matplotlib' --user) && "$0" "$@"
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def _make_parent_dirs_and_return_path(file_path: str):
    import os
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path
    def plot_linear(mlpipeline_ui_metadata):
    import base64
    import json
    from io import BytesIO
    import matplotlib.pyplot as plt
    plt.plot([1, 2, 3], [1, 2, 3])
    tmpfile = BytesIO()
    plt.savefig(tmpfile, format="png")
    encoded = base64.b64encode(tmpfile.getvalue()).decode("utf-8")
    html = f"<img src='data:image/png;base64,{encoded}'>"
    metadata = {
    "outputs": [
    {
    "type": "web-app",
    "storage": "inline",
    "source": html,
    },
    ],
    }
    with open(mlpipeline_ui_metadata, "w") as html_writer:
    json.dump(metadata, html_writer)

    import argparse
    _parser = argparse.ArgumentParser(prog='Plot linear', description='')
    _parser.add_argument("--mlpipeline-ui-metadata", dest="mlpipeline_ui_metadata", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())
    _outputs = plot_linear(**_parsed_args)
    image: python:3.7
    outputs:
    artifacts:
    - {name: mlpipeline-ui-metadata, path: /tmp/outputs/mlpipeline_ui_metadata/data}
    metadata:
    labels:
    pipelines.kubeflow.org/kfp_sdk_version: 1.8.9
    pipelines.kubeflow.org/pipeline-sdk-type: kfp
    pipelines.kubeflow.org/enable_caching: "true"
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--mlpipeline-ui-metadata", {"outputPath": "mlpipeline_ui_metadata"}],
    "command": ["sh", "-c", "(PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip
    install --quiet --no-warn-script-location ''matplotlib'' || PIP_DISABLE_PIP_VERSION_CHECK=1
    python3 -m pip install --quiet --no-warn-script-location ''matplotlib''
    --user) && \"$0\" \"$@\"", "sh", "-ec", "program_path=$(mktemp)\nprintf
    \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n",
    "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path),
    exist_ok=True)\n return file_path\n\ndef plot_linear(mlpipeline_ui_metadata):\n import
    base64\n import json\n from io import BytesIO\n\n import matplotlib.pyplot
    as plt\n\n plt.plot([1, 2, 3], [1, 2, 3])\n\n tmpfile = BytesIO()\n plt.savefig(tmpfile,
    format=\"png\")\n encoded = base64.b64encode(tmpfile.getvalue()).decode(\"utf-8\")\n\n html
    = f\"<img src=''data:image/png;base64,{encoded}''>\"\n metadata = {\n \"outputs\":
    [\n {\n \"type\": \"web-app\",\n \"storage\":
    \"inline\",\n \"source\": html,\n },\n ],\n }\n with
    open(mlpipeline_ui_metadata, \"w\") as html_writer:\n json.dump(metadata,
    html_writer)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Plot
    linear'', description='''')\n_parser.add_argument(\"--mlpipeline-ui-metadata\",
    dest=\"mlpipeline_ui_metadata\", type=_make_parent_dirs_and_return_path,
    required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs
    = plot_linear(**_parsed_args)\n"], "image": "python:3.7"}}, "name": "Plot
    linear", "outputs": [{"name": "mlpipeline_ui_metadata", "type": "UI_Metadata"}]}',
    pipelines.kubeflow.org/component_ref: '{}'}
    - name: plot-pipeline
    dag:
    tasks:
    - {name: plot-linear, template: plot-linear}
    arguments:
    parameters: []
    serviceAccountName: pipeline-runner

After running the pipeline, click Visualizations.

    advanced-run-5.png

    Run output

    advanced-run-2.png

Run output is where Kubeflow gathers the artifacts generated in a specified format and displays the evaluation metrics.

To show an evaluation metric, save the name and value you want to display in JSON format to the mlpipeline_metrics_path: OutputPath("Metrics") argument. For example, you can write it like this.

@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json
    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)

We will add a component that generates an evaluation metric to the pipeline created in the Pipeline section and execute it. The whole pipeline is as follows.

import kfp
from kfp.components import create_component_from_func, OutputPath
from kfp.dsl import pipeline


@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number


@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int) -> int:
    sum_number = number_1 + number_2
    print(sum_number)
    return sum_number


@create_component_from_func
def show_metric_of_sum(
    number: int,
    mlpipeline_metrics_path: OutputPath("Metrics"),
):
    import json
    metrics = {
        "metrics": [
            {
                "name": "sum_value",
                "numberValue": number,
            },
        ],
    }
    with open(mlpipeline_metrics_path, "w") as f:
        json.dump(metrics, f)


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )
    show_metric_of_sum(sum_result.output)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

After execution, click Run Output and you will see the following.

    advanced-run-4.png

    Config

    advanced-run-3.png

    In the Config tab, you can view all the values received as pipeline configurations.

    + \ No newline at end of file diff --git a/en/docs/kubeflow/basic-component/index.html b/en/docs/kubeflow/basic-component/index.html index 9bd93042..e5e3203f 100644 --- a/en/docs/kubeflow/basic-component/index.html +++ b/en/docs/kubeflow/basic-component/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ We can write it in Python code like this.

    print(number)

However, when this code is run, it fails with an error because number, the value that should be printed, is not defined.

    As we saw in Kubeflow Concepts, values like number that are required in component content are defined in Config. In order to execute component content, the necessary Configs must be passed from the component wrapper.

    Component Wrapper

    Define a standalone Python function

    Now we need to create a component wrapper to be able to pass the required Configs.

    Without a separate Config, it will be like this when wrapped with a component wrapper.

def print_and_return_number():
    print(number)
    return number

Now we add the Config required by the content as an argument to the wrapper. However, it is not enough to just write the argument; you must also write its type hint. When Kubeflow converts the pipeline into the Kubeflow format, it checks whether the specified input and output types match at each connection between components. If the input type required by a component does not match the output it receives from another component, the pipeline cannot be created.

Now we complete the component wrapper by writing down the argument, its type, and the return type as follows.

def print_and_return_number(number: int) -> int:
    print(number)
    return number

    In Kubeflow, you can only use types that can be expressed in json as return values. The most commonly used and recommended types are as follows:

    • int
    • float
    • str

    If you want to return multiple values instead of a single value, you must use collections.namedtuple.
For more details, please refer to the official Kubeflow documentation.
    For example, if you want to write a component that returns the quotient and remainder of a number when divided by 2, it should be written as follows.

from typing import NamedTuple


def divide_and_return_number(
    number: int,
) -> NamedTuple("DivideOutputs", [("quotient", int), ("remainder", int)]):
    from collections import namedtuple

    quotient, remainder = divmod(number, 2)
    print("quotient is", quotient)
    print("remainder is", remainder)

    divide_outputs = namedtuple(
        "DivideOutputs",
        [
            "quotient",
            "remainder",
        ],
    )
    return divide_outputs(quotient, remainder)

    Convert to Kubeflow Format

    Now you have to convert the written component into a format that can be used in Kubeflow. The conversion can be done through kfp.components.create_component_from_func. This converted form can be imported as a function in Python and used in the pipeline.

from kfp.components import create_component_from_func

@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number

    Share component with yaml file

If it is not possible to share the component as Python code, you can share it as a YAML file instead. To do this, first convert the component to a YAML file and then use it in the pipeline with kfp.components.load_component_from_file.

    First, let's explain the process of converting the written component to a YAML file.

from kfp.components import create_component_from_func

@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number

if __name__ == "__main__":
    print_and_return_number.component_spec.save("print_and_return_number.yaml")

    If you run the Python code you wrote, a file called print_and_return_number.yaml will be created. When you check the file, it will be as follows.

name: Print and return number
inputs:
- {name: number, type: Integer}
outputs:
- {name: Output, type: Integer}
implementation:
  container:
    image: python:3.7
    command:
    - sh
    - -ec
    - |
      program_path=$(mktemp)
      printf "%s" "$0" > "$program_path"
      python3 -u "$program_path" "$@"
    - |
      def print_and_return_number(number):
          print(number)
          return number

      def _serialize_int(int_value: int) -> str:
          if isinstance(int_value, str):
              return int_value
          if not isinstance(int_value, int):
              raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
          return str(int_value)

      import argparse
      _parser = argparse.ArgumentParser(prog='Print and return number', description='')
      _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
      _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
      _parsed_args = vars(_parser.parse_args())
      _output_files = _parsed_args.pop("_output_paths", [])

      _outputs = print_and_return_number(**_parsed_args)

      _outputs = [_outputs]

      _output_serializers = [
          _serialize_int,

      ]

      import os
      for idx, output_file in enumerate(_output_files):
          try:
              os.makedirs(os.path.dirname(output_file))
          except OSError:
              pass
          with open(output_file, 'w') as f:
              f.write(_output_serializers[idx](_outputs[idx]))
    args:
    - --number
    - {inputValue: number}
    - '----output-paths'
    - {outputPath: Output}

    Now the generated file can be shared and used in the pipeline as follows.

    from kfp.components import load_component_from_file

    print_and_return_number = load_component_from_file("print_and_return_number.yaml")
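
The loaded component behaves just like a function converted with create_component_from_func, so it can be called inside a pipeline in the same way. A minimal sketch; the pipeline name and output file name below are arbitrary.

import kfp
from kfp.components import load_component_from_file
from kfp.dsl import pipeline

# Load the shared component definition from the YAML file.
print_and_return_number = load_component_from_file("print_and_return_number.yaml")


@pipeline(name="shared_component_pipeline")
def shared_component_pipeline(number: int):
    # The loaded component is called just like the decorated function.
    print_and_return_number(number)


if __name__ == "__main__":
    kfp.compiler.Compiler().compile(shared_component_pipeline, "shared_component_pipeline.yaml")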

    How Kubeflow executes component

    In Kubeflow, the execution order of components is as follows:

    1. docker pull <image>: Pull the image containing the execution environment information of the defined component.
    2. Run command: Execute the component's content within the pulled image.

    Taking print_and_return_number.yaml as an example, the default image in @create_component_from_func is python:3.7, so the component's content will be executed based on that image.

    1. docker pull python:3.7
    2. print(number)
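
The base image is not fixed to python:3.7. When converting the function you can point it at a different image and install extra pip packages, which is useful when the component content needs libraries that are not in the default image. A minimal sketch; the image tag and package below are illustrative assumptions, not values used elsewhere in this tutorial.

from kfp.components import create_component_from_func


def print_and_return_number(number: int) -> int:
    print(number)
    return number


# Convert the function while overriding the execution image and dependencies.
print_and_return_number_op = create_component_from_func(
    print_and_return_number,
    base_image="python:3.9",          # assumed image tag
    packages_to_install=["numpy"],    # assumed extra dependency
)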


    + \ No newline at end of file diff --git a/en/docs/kubeflow/basic-pipeline-upload/index.html b/en/docs/kubeflow/basic-pipeline-upload/index.html index f521e0d7..a2664528 100644 --- a/en/docs/kubeflow/basic-pipeline-upload/index.html +++ b/en/docs/kubeflow/basic-pipeline-upload/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
    Version: Next

    6. Pipeline - Upload

    Upload Pipeline

Now, let's upload the pipeline we created directly to Kubeflow.
Pipeline uploads can be done through the Kubeflow dashboard UI. Use the port-forwarding method from Install Kubeflow.

    kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

    Access http://localhost:8080 to open the dashboard.

    1. Click Pipelines Tab

    pipeline-gui-0.png

    2. Click Upload Pipeline

    pipeline-gui-1.png

    3. Click Choose file

    pipeline-gui-2.png

    4. Upload created yaml file

    pipeline-gui-3.png

    5. Create

    pipeline-gui-4.png
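
The same upload can also be done from code with the kfp SDK instead of the dashboard. This is only a sketch: the host below is an assumption, and depending on your installation the client may need additional authentication.

import kfp

# Assumed endpoint exposed by the port-forward above; authentication is omitted.
client = kfp.Client(host="http://localhost:8080/pipeline")

client.upload_pipeline(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_name="example_pipeline",
)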

    Upload Pipeline Version

The uploaded pipeline allows you to manage versions through subsequent uploads. However, this groups pipelines under the same name rather than providing code-level version management like GitHub. In the example above, clicking on example_pipeline will bring up the following screen.

    pipeline-gui-5.png

If you click it, the following screen appears.

    pipeline-gui-4.png

    If you click Upload Version, a screen appears where you can upload the pipeline.

    pipeline-gui-6.png

    Now, upload your pipeline.

    pipeline-gui-7.png

    Once uploaded, you can check the pipeline version as follows.

    pipeline-gui-8.png
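
Uploading a new version can also be scripted. A minimal sketch under the same assumptions as the upload sketch above; the version label is arbitrary.

import kfp

client = kfp.Client(host="http://localhost:8080/pipeline")  # assumed endpoint

# Attach a new version to the existing pipeline by name.
client.upload_pipeline_version(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_version_name="v2",  # assumed version label
    pipeline_name="example_pipeline",
)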

+ \ No newline at end of file diff --git a/en/docs/kubeflow/basic-pipeline/index.html index e87d8bcd..eb6914d1 100644 --- a/en/docs/kubeflow/basic-pipeline/index.html +++ b/en/docs/kubeflow/basic-pipeline/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ The return value of the stored number_1_result can be used through number_1_result.output.

    Multi Output

    In the example above, the components return a single value, so it can be directly used with output.
    However, if there are multiple return values, they will be stored in outputs as a dictionary. You can use the keys to access the desired return values. Let's consider an example with a component that returns multiple values, like the one mentioned in the component definition. The divide_and_return_number component returns quotient and remainder. Here's an example of passing these two values to print_and_return_number:

def multi_pipeline(number: int):
    divided_result = divide_and_return_number(number)
    num_1_result = print_and_return_number(divided_result.outputs["quotient"])
    num_2_result = print_and_return_number(divided_result.outputs["remainder"])

    Store the result of divide_and_return_number in divided_result and you can get the values of each by divided_result.outputs["quotient"] and divided_result.outputs["remainder"].

Write in Python code

    Now, let's get back to the main topic and pass the result of these two values to sum_and_print_numbers.

def example_pipeline():
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

Next, gather the Configs required by each component and define them as the pipeline's Configs.

def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

    Convert to Kubeflow Format

    Finally, convert it into a format that can be used in Kubeflow. The conversion can be done using the kfp.dsl.pipeline function.

from kfp.dsl import pipeline


@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

To run a pipeline in Kubeflow, it must be compiled into the designated YAML format. Compilation can be done using the following command.

if __name__ == "__main__":
    import kfp
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    Conclusion

As explained earlier, if we gather everything into a single Python script, it looks like this.

import kfp
from kfp.components import create_component_from_func
from kfp.dsl import pipeline

@create_component_from_func
def print_and_return_number(number: int) -> int:
    print(number)
    return number

@create_component_from_func
def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)

@pipeline(name="example_pipeline")
def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
        number_1=number_1_result.output, number_2=number_2_result.output
    )

if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    The compiled result is as follows.

    example_pipeline.yaml
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
    generateName: example-pipeline-
    annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
    "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
    spec:
    entrypoint: example-pipeline
    templates:
    - name: example-pipeline
    inputs:
    parameters:
    - {name: number_1}
    - {name: number_2}
    dag:
    tasks:
    - name: print-and-return-number
    template: print-and-return-number
    arguments:
    parameters:
    - {name: number_1, value: '{{inputs.parameters.number_1}}'}
    - name: print-and-return-number-2
    template: print-and-return-number-2
    arguments:
    parameters:
    - {name: number_2, value: '{{inputs.parameters.number_2}}'}
    - name: sum-and-print-numbers
    template: sum-and-print-numbers
    dependencies: [print-and-return-number, print-and-return-number-2]
    arguments:
    parameters:
    - {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
    - {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
    - name: print-and-return-number
    container:
    args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_1}
    outputs:
    parameters:
    - name: print-and-return-number-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_1}}"}'}
    - name: print-and-return-number-2
    container:
    args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def print_and_return_number(number):
    print(number)
    return number

    def _serialize_int(int_value: int) -> str:
    if isinstance(int_value, str):
    return int_value
    if not isinstance(int_value, int):
    raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
    return str(int_value)

    import argparse
    _parser = argparse.ArgumentParser(prog='Print and return number', description='')
    _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
    _parsed_args = vars(_parser.parse_args())
    _output_files = _parsed_args.pop("_output_paths", [])

    _outputs = print_and_return_number(**_parsed_args)

    _outputs = [_outputs]

    _output_serializers = [
    _serialize_int,

    ]

    import os
    for idx, output_file in enumerate(_output_files):
    try:
    os.makedirs(os.path.dirname(output_file))
    except OSError:
    pass
    with open(output_file, 'w') as f:
    f.write(_output_serializers[idx](_outputs[idx]))
    image: python:3.7
    inputs:
    parameters:
    - {name: number_2}
    outputs:
    parameters:
    - name: print-and-return-number-2-Output
    valueFrom: {path: /tmp/outputs/Output/data}
    artifacts:
    - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
    "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    print_and_return_number(number):\n print(number)\n return number\n\ndef
    _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
    int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
    \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
    str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
    and return number'', description='''')\n_parser.add_argument(\"--number\",
    dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
    dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
    = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
    = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
    os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
    OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
    "name": "Print and return number", "outputs": [{"name": "Output", "type":
    "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
    "{{inputs.parameters.number_2}}"}'}
    - name: sum-and-print-numbers
    container:
    args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
    '{{inputs.parameters.print-and-return-number-2-Output}}']
    command:
    - sh
    - -ec
    - |
    program_path=$(mktemp)
    printf "%s" "$0" > "$program_path"
    python3 -u "$program_path" "$@"
    - |
    def sum_and_print_numbers(number_1, number_2):
    print(number_1 + number_2)

    import argparse
    _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
    _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
    _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
    _parsed_args = vars(_parser.parse_args())

    _outputs = sum_and_print_numbers(**_parsed_args)
    image: python:3.7
    inputs:
    parameters:
    - {name: print-and-return-number-2-Output}
    - {name: print-and-return-number-Output}
    metadata:
    labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
    annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
    {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
    "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
    \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
    sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
    argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
    description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
    type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
    dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
    = vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
    "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
    {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
    pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
    "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
    arguments:
    parameters:
    - {name: number_1}
    - {name: number_2}
    serviceAccountName: pipeline-runner
    - +Compilation can be done using the following command.

    if __name__ == "__main__":
    import kfp
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    Conclusion

    As explained earlier, if we gather the content into a Python code, it will look like this.

    import kfp
    from kfp.components import create_component_from_func
    from kfp.dsl import pipeline

    @create_component_from_func
    def print_and_return_number(number: int) -> int:
    print(number)
    return number

    @create_component_from_func
    def sum_and_print_numbers(number_1: int, number_2: int):
    print(number_1 + number_2)

    @pipeline(name="example_pipeline")
    def example_pipeline(number_1: int, number_2: int):
    number_1_result = print_and_return_number(number_1)
    number_2_result = print_and_return_number(number_2)
    sum_result = sum_and_print_numbers(
    number_1=number_1_result.output, number_2=number_2_result.output
    )

    if __name__ == "__main__":
    kfp.compiler.Compiler().compile(example_pipeline, "example_pipeline.yaml")

    The compiled result is as follows.

    example_pipeline.yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: example-pipeline-
  annotations: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline_compilation_time: '2021-12-05T13:38:51.566777',
    pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"name": "number_1", "type":
      "Integer"}, {"name": "number_2", "type": "Integer"}], "name": "example_pipeline"}'}
  labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3}
spec:
  entrypoint: example-pipeline
  templates:
  - name: example-pipeline
    inputs:
      parameters:
      - {name: number_1}
      - {name: number_2}
    dag:
      tasks:
      - name: print-and-return-number
        template: print-and-return-number
        arguments:
          parameters:
          - {name: number_1, value: '{{inputs.parameters.number_1}}'}
      - name: print-and-return-number-2
        template: print-and-return-number-2
        arguments:
          parameters:
          - {name: number_2, value: '{{inputs.parameters.number_2}}'}
      - name: sum-and-print-numbers
        template: sum-and-print-numbers
        dependencies: [print-and-return-number, print-and-return-number-2]
        arguments:
          parameters:
          - {name: print-and-return-number-2-Output, value: '{{tasks.print-and-return-number-2.outputs.parameters.print-and-return-number-2-Output}}'}
          - {name: print-and-return-number-Output, value: '{{tasks.print-and-return-number.outputs.parameters.print-and-return-number-Output}}'}
  - name: print-and-return-number
    container:
      args: [--number, '{{inputs.parameters.number_1}}', '----output-paths', /tmp/outputs/Output/data]
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def print_and_return_number(number):
            print(number)
            return number

        def _serialize_int(int_value: int) -> str:
            if isinstance(int_value, str):
                return int_value
            if not isinstance(int_value, int):
                raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
            return str(int_value)

        import argparse
        _parser = argparse.ArgumentParser(prog='Print and return number', description='')
        _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
        _parsed_args = vars(_parser.parse_args())
        _output_files = _parsed_args.pop("_output_paths", [])

        _outputs = print_and_return_number(**_parsed_args)

        _outputs = [_outputs]

        _output_serializers = [
            _serialize_int,

        ]

        import os
        for idx, output_file in enumerate(_output_files):
            try:
                os.makedirs(os.path.dirname(output_file))
            except OSError:
                pass
            with open(output_file, 'w') as f:
                f.write(_output_serializers[idx](_outputs[idx]))
      image: python:3.7
    inputs:
      parameters:
      - {name: number_1}
    outputs:
      parameters:
      - name: print-and-return-number-Output
        valueFrom: {path: /tmp/outputs/Output/data}
      artifacts:
      - {name: print-and-return-number-Output, path: /tmp/outputs/Output/data}
    metadata:
      labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
      annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
          {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
          "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
          \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
          print_and_return_number(number):\n print(number)\n return number\n\ndef
          _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
          int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
          \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
          str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
          and return number'', description='''')\n_parser.add_argument(\"--number\",
          dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
          dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
          = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
          = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
          os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
          OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
          "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
          "name": "Print and return number", "outputs": [{"name": "Output", "type":
          "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
          "{{inputs.parameters.number_1}}"}'}
  - name: print-and-return-number-2
    container:
      args: [--number, '{{inputs.parameters.number_2}}', '----output-paths', /tmp/outputs/Output/data]
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def print_and_return_number(number):
            print(number)
            return number

        def _serialize_int(int_value: int) -> str:
            if isinstance(int_value, str):
                return int_value
            if not isinstance(int_value, int):
                raise TypeError('Value "{}" has type "{}" instead of int.'.format(str(int_value), str(type(int_value))))
            return str(int_value)

        import argparse
        _parser = argparse.ArgumentParser(prog='Print and return number', description='')
        _parser.add_argument("--number", dest="number", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1)
        _parsed_args = vars(_parser.parse_args())
        _output_files = _parsed_args.pop("_output_paths", [])

        _outputs = print_and_return_number(**_parsed_args)

        _outputs = [_outputs]

        _output_serializers = [
            _serialize_int,

        ]

        import os
        for idx, output_file in enumerate(_output_files):
            try:
                os.makedirs(os.path.dirname(output_file))
            except OSError:
                pass
            with open(output_file, 'w') as f:
                f.write(_output_serializers[idx](_outputs[idx]))
      image: python:3.7
    inputs:
      parameters:
      - {name: number_2}
    outputs:
      parameters:
      - name: print-and-return-number-2-Output
        valueFrom: {path: /tmp/outputs/Output/data}
      artifacts:
      - {name: print-and-return-number-2-Output, path: /tmp/outputs/Output/data}
    metadata:
      labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
      annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
          {"args": ["--number", {"inputValue": "number"}, "----output-paths", {"outputPath":
          "Output"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
          \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
          print_and_return_number(number):\n print(number)\n return number\n\ndef
          _serialize_int(int_value: int) -> str:\n if isinstance(int_value, str):\n return
          int_value\n if not isinstance(int_value, int):\n raise TypeError(''Value
          \"{}\" has type \"{}\" instead of int.''.format(str(int_value), str(type(int_value))))\n return
          str(int_value)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Print
          and return number'', description='''')\n_parser.add_argument(\"--number\",
          dest=\"number\", type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\",
          dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files
          = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = print_and_return_number(**_parsed_args)\n\n_outputs
          = [_outputs]\n\n_output_serializers = [\n _serialize_int,\n\n]\n\nimport
          os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except
          OSError:\n pass\n with open(output_file, ''w'') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n"],
          "image": "python:3.7"}}, "inputs": [{"name": "number", "type": "Integer"}],
          "name": "Print and return number", "outputs": [{"name": "Output", "type":
          "Integer"}]}', pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number":
          "{{inputs.parameters.number_2}}"}'}
  - name: sum-and-print-numbers
    container:
      args: [--number-1, '{{inputs.parameters.print-and-return-number-Output}}', --number-2,
        '{{inputs.parameters.print-and-return-number-2-Output}}']
      command:
      - sh
      - -ec
      - |
        program_path=$(mktemp)
        printf "%s" "$0" > "$program_path"
        python3 -u "$program_path" "$@"
      - |
        def sum_and_print_numbers(number_1, number_2):
            print(number_1 + number_2)

        import argparse
        _parser = argparse.ArgumentParser(prog='Sum and print numbers', description='')
        _parser.add_argument("--number-1", dest="number_1", type=int, required=True, default=argparse.SUPPRESS)
        _parser.add_argument("--number-2", dest="number_2", type=int, required=True, default=argparse.SUPPRESS)
        _parsed_args = vars(_parser.parse_args())

        _outputs = sum_and_print_numbers(**_parsed_args)
      image: python:3.7
    inputs:
      parameters:
      - {name: print-and-return-number-2-Output}
      - {name: print-and-return-number-Output}
    metadata:
      labels: {pipelines.kubeflow.org/kfp_sdk_version: 1.6.3, pipelines.kubeflow.org/pipeline-sdk-type: kfp}
      annotations: {pipelines.kubeflow.org/component_spec: '{"implementation": {"container":
          {"args": ["--number-1", {"inputValue": "number_1"}, "--number-2", {"inputValue":
          "number_2"}], "command": ["sh", "-ec", "program_path=$(mktemp)\nprintf \"%s\"
          \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def
          sum_and_print_numbers(number_1, number_2):\n print(number_1 + number_2)\n\nimport
          argparse\n_parser = argparse.ArgumentParser(prog=''Sum and print numbers'',
          description='''')\n_parser.add_argument(\"--number-1\", dest=\"number_1\",
          type=int, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--number-2\",
          dest=\"number_2\", type=int, required=True, default=argparse.SUPPRESS)\n_parsed_args
          = vars(_parser.parse_args())\n\n_outputs = sum_and_print_numbers(**_parsed_args)\n"],
          "image": "python:3.7"}}, "inputs": [{"name": "number_1", "type": "Integer"},
          {"name": "number_2", "type": "Integer"}], "name": "Sum and print numbers"}',
          pipelines.kubeflow.org/component_ref: '{}', pipelines.kubeflow.org/arguments.parameters: '{"number_1":
          "{{inputs.parameters.print-and-return-number-Output}}", "number_2": "{{inputs.parameters.print-and-return-number-2-Output}}"}'}
  arguments:
    parameters:
    - {name: number_1}
    - {name: number_2}
  serviceAccountName: pipeline-runner
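For reference, the compiled package can also be registered without going through the web UI by using the kfp SDK directly. The snippet below is only a minimal sketch and not part of the original walkthrough: the host address and pipeline name are placeholders that depend on how your Kubeflow Pipelines endpoint is exposed and authenticated.

import kfp

# Placeholder host: adjust to your own Kubeflow Pipelines endpoint (and auth setup).
client = kfp.Client(host="http://localhost:8080/pipeline")

# Register the compiled package so that it appears in the Pipelines UI.
client.upload_pipeline(
    pipeline_package_path="example_pipeline.yaml",
    pipeline_name="example_pipeline",
)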
    + \ No newline at end of file diff --git a/en/docs/kubeflow/basic-requirements/index.html b/en/docs/kubeflow/basic-requirements/index.html index f79a5f30..bdaaaf50 100644 --- a/en/docs/kubeflow/basic-requirements/index.html +++ b/en/docs/kubeflow/basic-requirements/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    3. Install Requirements

    The recommended Python version for practice is python>=3.7. For those unfamiliar with the Python environment, please refer to Appendix 1. Python Virtual Environment and install the packages on the client node.

    The packages and versions required for the practice are as follows:

    • requirements.txt

      kfp==1.8.9
      scikit-learn==1.0.1
      mlflow==1.21.0
      pandas==1.3.4
      dill==0.3.4

    Activate the Python virtual environment created in the previous section.

    pyenv activate demo

Proceed with the package installation.

    pip3 install -U pip
    pip3 install kfp==1.8.9 scikit-learn==1.0.1 mlflow==1.21.0 pandas==1.3.4 dill==0.3.4
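If you want a quick sanity check that the environment matches the list above, the versions can be printed from Python. This check is optional and not part of the official guide; it simply reads each package's __version__ attribute.

# Optional sanity check of the practice environment.
import dill
import kfp
import mlflow
import pandas
import sklearn

for name, module in [
    ("kfp", kfp),
    ("scikit-learn", sklearn),
    ("mlflow", mlflow),
    ("pandas", pandas),
    ("dill", dill),
]:
    print(name, module.__version__)  # expect 1.8.9, 1.0.1, 1.21.0, 1.3.4, 0.3.4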
    + \ No newline at end of file diff --git a/en/docs/kubeflow/basic-run/index.html b/en/docs/kubeflow/basic-run/index.html index dca528a2..e9c6f99b 100644 --- a/en/docs/kubeflow/basic-run/index.html +++ b/en/docs/kubeflow/basic-run/index.html @@ -7,14 +7,14 @@ - +
    Version: Next

    7. Pipeline - Run

    Run Pipeline

    Now we will run the uploaded pipeline.

    Before Run

    1. Create Experiment

    Experiments in Kubeflow are units that logically manage runs executed within them.

    When you first enter the namespace in Kubeflow, there are no Experiments created. Therefore, you must create an Experiment beforehand in order to run the pipeline. If an Experiment already exists, you can go to Run Pipeline.

    Experiments can be created via the Create Experiment button.

    run-0.png

2. Enter Name

    run-1.png

    Run Pipeline

    1. Select Create Run

    run-2.png

    2. Select Experiment

    run-9.png

    run-10.png

    3. Enter Pipeline Config

    Fill in the values of the Config provided when creating the pipeline. The uploaded pipeline requires input values for number_1 and number_2.

    run-3.png

    4. Start

    Click the Start button after entering the values. The pipeline will start running.

    run-4.png
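The steps above use the web UI, but the same run can also be created programmatically with the kfp SDK. The sketch below is illustrative only: the host, experiment name, and the argument values (the same number_1 and number_2 entered in the UI) are assumptions for a local setup.

import kfp

# Placeholder host: adjust to your own Kubeflow Pipelines endpoint (and auth setup).
client = kfp.Client(host="http://localhost:8080/pipeline")

client.create_run_from_pipeline_package(
    "example_pipeline.yaml",
    arguments={"number_1": 3, "number_2": 5},  # the pipeline config values
    experiment_name="demo-experiment",         # the experiment created earlier
)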

    Run Result

The executed pipelines can be viewed in the Runs tab. Clicking on a run provides detailed information related to the executed pipeline.

    run-5.png

    Upon clicking, the following screen appears. Components that have not yet executed are displayed in gray.

    run-6.png

    When a component has completed execution, it is marked with a green checkmark.

    run-7.png

    If we look at the last component, we can see that it has outputted the sum of the input values, which in this case is 8 (the sum of 3 and 5).

    run-8.png

    + \ No newline at end of file diff --git a/en/docs/kubeflow/how-to-debug/index.html b/en/docs/kubeflow/how-to-debug/index.html index 9488b055..1b8a5a5e 100644 --- a/en/docs/kubeflow/how-to-debug/index.html +++ b/en/docs/kubeflow/how-to-debug/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ Let's investigate what might be the problem.

    First, click on the component and go to the Input/Output tab to download the input data.
    You can click on the link indicated by the red square to download the data.

    debug-5.png

    Download both files to the same location. Then navigate to the specified path and check the downloaded files.

    ls

    There are two files as follows.

    drop-na-from-csv-output.tgz load-iris-data-target.tgz

    I will try to unzip it.

    tar -xzvf load-iris-data-target.tgz ; mv data target.csv
    tar -xzvf drop-na-from-csv-output.tgz ; mv data data.csv

    And then run the component code using a Jupyter notebook. debug-3.png
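The notebook check can be reproduced with a few lines of pandas. The sketch below assumes the two files renamed above (data.csv and target.csv) sit in the current directory; it simply compares what dropna removes row-wise versus column-wise.

import pandas as pd

data = pd.read_csv("data.csv")
print(data.shape)

# Dropping along rows (axis=0) removes every row that contains any NaN,
# so a column that is entirely NaN wipes out the whole DataFrame.
print(data.dropna(axis=0).shape)

# Dropping along columns removes only the offending columns.
print(data.dropna(axis="columns").shape)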

Debugging revealed that dropping the data was based on rows instead of columns, resulting in all the data being removed. Now that we know the cause of the problem, we can modify the component to drop based on columns.

@partial(
    create_component_from_func,
    packages_to_install=["pandas"],
)
def drop_na_from_csv(
    data_path: InputPath("csv"),
    output_path: OutputPath("csv"),
):
    import pandas as pd

    data = pd.read_csv(data_path)
    data = data.dropna(axis="columns")
    data.to_csv(output_path, index=False)

    After modifying, upload the pipeline again and run it to confirm that it is running normally as follows.

    debug-6.png

    + \ No newline at end of file diff --git a/en/docs/kubeflow/kubeflow-concepts/index.html b/en/docs/kubeflow/kubeflow-concepts/index.html index 47ffac53..2fd1b0cd 100644 --- a/en/docs/kubeflow/kubeflow-concepts/index.html +++ b/en/docs/kubeflow/kubeflow-concepts/index.html @@ -7,7 +7,7 @@ - + @@ -18,8 +18,8 @@ Next, Python Code w\ Config is where the given Config is used to actually perform the training.
    Finally, there is a process to save the artifacts.

    Component Wrapper

A component wrapper delivers the necessary Config to the component contents and executes them.

    concept-3.png

    In Kubeflow, component wrappers are defined as functions, similar to the train_svc_from_csv example above. When a component wrapper wraps the contents, it looks like the following:

    concept-4.png
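As a rough illustration (not the exact code from the earlier chapter), a wrapper like train_svc_from_csv might look as follows: the decorator turns the function into a component, the parameters are the Config it receives, and the body is the contents that do the actual work. The package list and paths here are assumptions made only for this sketch.

from functools import partial

from kfp.components import InputPath, OutputPath, create_component_from_func

@partial(create_component_from_func, packages_to_install=["dill", "pandas", "scikit-learn"])
def train_svc_from_csv(
    train_data_path: InputPath("csv"),    # Config: where to read the features
    train_target_path: InputPath("csv"),  # Config: where to read the targets
    model_path: OutputPath("dill"),       # Config: where to write the artifact
    kernel: str,                          # Config: hyperparameter
):
    # Contents: load the data, train the model, and save the artifact.
    import dill
    import pandas as pd
    from sklearn.svm import SVC

    train_data = pd.read_csv(train_data_path)
    train_target = pd.read_csv(train_target_path)

    clf = SVC(kernel=kernel)
    clf.fit(train_data, train_target)

    with open(model_path, mode="wb") as file_writer:
        dill.dump(clf, file_writer)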

    Artifacts

In the explanation above, it was mentioned that the component creates Artifacts. An artifact is any file generated along the way, such as evaluation results or logs. Of these, the ones we are most interested in are models, data, and metrics.

    concept-5.png

    • Model
    • Data
    • Metric
    • etc

    Model

    We defined the model as follows:

    A model is a form that includes Python code, trained weights and network architecture, and an environment to run it.

    Data

    Data includes preprocessed features, model predictions, etc.

    Metric

    Metric is divided into two categories: dynamic metrics and static metrics.

    • Dynamic metrics refer to values that continuously change during the training process, such as train loss per epoch.
    • Static metrics refer to evaluation metrics, such as accuracy, that are calculated after the training is completed.

    Pipeline

    A pipeline consists of a collection of components and the order in which they are executed. The order forms a directed acyclic graph (DAG), which can include simple conditional statements.

    concept-6.png
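A hedged sketch of such a DAG in the kfp v1 DSL is shown below; the component and pipeline names are made up for illustration, and dsl.Condition is the kind of simple conditional mentioned above.

from kfp import dsl
from kfp.components import create_component_from_func

@create_component_from_func
def flip_coin() -> str:
    import random
    return random.choice(["heads", "tails"])

@create_component_from_func
def print_msg(msg: str):
    print(msg)

@dsl.pipeline(name="conditional-example")
def conditional_pipeline():
    flip_task = flip_coin()
    # Downstream tasks depend on flip_task's output, and only one branch runs.
    with dsl.Condition(flip_task.output == "heads"):
        print_msg("got heads")
    with dsl.Condition(flip_task.output == "tails"):
        print_msg("got tails")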

    Pipeline Config

    As mentioned earlier, components require config to be executed. The pipeline config contains the configs for all the components in the pipeline.

    concept-7.png

    Run

    To execute a pipeline, the pipeline config specific to that pipeline is required. In Kubeflow, an executed pipeline is called a "Run."

    concept-8.png

    When a pipeline is executed, each component generates artifacts. Kubeflow pipeline assigns a unique ID to each Run, and all artifacts generated during the Run are stored.

    concept-9.png

    Now, let's learn how to write components and pipelines.

    + \ No newline at end of file diff --git a/en/docs/kubeflow/kubeflow-intro/index.html b/en/docs/kubeflow/kubeflow-intro/index.html index 2523f503..d70f4c98 100644 --- a/en/docs/kubeflow/kubeflow-intro/index.html +++ b/en/docs/kubeflow/kubeflow-intro/index.html @@ -7,13 +7,13 @@ - +
    Version: Next

    1. Kubeflow Introduction

    To use Kubeflow, you need to write components and pipelines.

    The approach described in MLOps for ALL differs slightly from the method described on the Kubeflow Pipeline official website. Here, Kubeflow Pipeline is used as one of the components in the elements that make up MLOps rather than a standalone workflow.

    Now, let's understand what components and pipelines are and how to write them.

    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/advanced/index.html b/en/docs/prerequisites/docker/advanced/index.html index d69b2d37..f70d9c7b 100644 --- a/en/docs/prerequisites/docker/advanced/index.html +++ b/en/docs/prerequisites/docker/advanced/index.html @@ -7,7 +7,7 @@ - + @@ -25,8 +25,8 @@ To address this problem, docker provides the -m option which allows you to limit the usage of CPU and memory when running the docker container.

    docker run -d -m 512m --memory-reservation=256m --name 512-limit ubuntu sleep 3600
    docker run -d -m 1g --memory-reservation=256m --name 1g-limit ubuntu sleep 3600

    After running the Docker above, you can check the usage through the 'docker stats' command.

    CONTAINER ID   NAME        CPU %     MEM USAGE / LIMIT   MEM %     NET I/O       BLOCK I/O   PIDS
    4ea1258e2e09 1g-limit 0.00% 300KiB / 1GiB 0.03% 1kB / 0B 0B / 0B 1
    4edf94b9a3e5 512-limit 0.00% 296KiB / 512MiB 0.06% 1.11kB / 0B 0B / 0B 1

    In Kubernetes, when you limit the CPU and memory resources of a pod resource, it is provided using this technique.

    docker run with restart policy

    If there is a need to keep a particular container running continuously, the --restart=always option is provided to try to re-create the container immediately after it is terminated.

    After entering the option, run the docker.

    docker run --restart=always ubuntu

    Run watch -n1 docker ps to check if it is restarting. If it is running normally, Restarting (0) will be printed in STATUS.

    CONTAINER ID   IMAGE     COMMAND   CREATED          STATUS                         PORTS     NAMES
    a911850276e8 ubuntu "bash" 35 seconds ago Restarting (0) 6 seconds ago hungry_vaughan

    When specifying the restart option for a job resource in Kubernetes, this approach is used.

    Running docker run as a background process

    By default, when running a Docker container, it is executed as a foreground process. This means that the terminal that launched the container is automatically attached to it, preventing you from running other commands.

    Let's try an example. Open two terminals, and in one terminal, continuously monitor docker ps, while in the other terminal, execute the following commands one by one and observe the behavior.

    First Practice

    docker run -it ubuntu sleep 10

    You must remain stopped for 10 seconds and you cannot perform any other commands from that container. After 10 seconds, you can check in docker ps that the container has terminated.

    Second Practice

    docker run -it ubuntu sleep 10

    After that, press ctrl + p -> ctrl + q.

    Now you can perform other commands in that terminal, and you can also see that the container is still alive for up to 10 seconds with docker ps. This situation, where you exit from the Docker container, is called "detached". Docker provides an option to run containers in detached mode, which allows you to run the container in the background while executing the run command.

    Third Practice

    docker run -d ubuntu sleep 10

    In detached mode, you can perform other actions in the terminal that executed the command.

Use detached mode as appropriate for the situation.
For example, when developing a backend API server that communicates with a DB, the API server needs to be watched continuously (hot-reloading as the source code changes), while the DB does not need to be monitored. In that case, run the DB container in detached mode and run the backend API server attached so that you can follow its logs.

    References

    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/command/index.html b/en/docs/prerequisites/docker/command/index.html index b8789c6a..9810e791 100644 --- a/en/docs/prerequisites/docker/command/index.html +++ b/en/docs/prerequisites/docker/command/index.html @@ -7,7 +7,7 @@ - + @@ -20,8 +20,8 @@ Even when stopped, the data used in the Docker remains in the container. So you can restart the container through restarting. But this process will use disk. So -in order to delete the containers that are not used at all, we should use the docker rm command.

    First, let's check the current containers.

    docker ps -a

    There are three containers as follows.

    CONTAINER ID   IMAGE          COMMAND                  CREATED          STATUS                            PORTS     NAMES
    730391669c39 busybox "sh -c 'while true; …" 4 minutes ago Exited (137) About a minute ago demo3
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 7 minutes ago Exited (137) 2 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 10 minutes ago Exited (0) 10 minutes ago demo1

    Let's try to delete the 'demo3' container through the following command.

    docker rm demo3

Running docker ps -a again shows that the list is down to two containers.

    CONTAINER ID   IMAGE          COMMAND        CREATED          STATUS                       PORTS     NAMES
    fc88a83e90f0 ubuntu:18.04 "sleep 3600" 13 minutes ago Exited (137) 8 minutes ago demo2
    4c1aa74a382a ubuntu:18.04 "/bin/bash" 16 minutes ago Exited (0) 16 minutes ago demo1

    Delete the remaining containers as well.

    docker rm demo2
    docker rm demo1

    10. Docker rmi

    Command to delete a Docker image.

    docker rmi --help

    Use the following commands to check which images are currently on the local.

    docker images

    The following is output.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    busybox latest a8440bba1bc0 32 hours ago 1.41MB
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

    I will try to delete the busybox image.

    docker rmi busybox

    If you type docker images again, the following will appear.

    REPOSITORY   TAG       IMAGE ID       CREATED        SIZE
    ubuntu 18.04 29e70752d7b2 2 days ago 56.7MB

    References

    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/images/index.html b/en/docs/prerequisites/docker/images/index.html index 847fba37..6284d057 100644 --- a/en/docs/prerequisites/docker/images/index.html +++ b/en/docs/prerequisites/docker/images/index.html @@ -7,14 +7,14 @@ - +
    Version: Next

    [Practice] Docker images

• docker commit
  • A way to turn a running container into a docker image
  • docker commit -m "message" -a "author" <container-id> <image-name>
  • With docker commit, you can create a docker image without writing a Dockerfile by hand.

1. Move to the docker-practice folder.

2. Create an empty file called Dockerfile.

  touch Dockerfile

3. Which Dockerfile command installs a specific package into the image?

Answer: RUN

Let's look at the basic commands that can be used in a Dockerfile one by one. FROM specifies which image to use as the base image for the Dockerfile. When creating a Docker image, instead of building the intended environment from scratch, you can take a pre-made image such as python:3.9 or python-3.9-alpine as the base and then, for example, install pytorch and add your own source code on top of it.

    FROM <image>[:<tag>] [AS <name>]

# Example
    FROM ubuntu
    FROM ubuntu:18.04
    FROM nginx:latest AS ngx

    The command to copy files or directories from the <src> path on the host (local) to the <dest> path inside the container.

    COPY <src>... <dest>

# Example
    COPY a.txt /some-directory/b.txt
    COPY my-directory /some-directory-2

    ADD is similar to COPY but it has additional features.

# 1 - An archive file on the host can be extracted while copying it into the container
ADD scripts.tar.gz /tmp
# 2 - A file at a remote URL can be specified as the source path
ADD http://www.example.com/script.sh /tmp

# Use ADD instead of COPY only when you need one of the two features above

The command to run the specified command inside the Docker container. Docker images retain the state after each command has been executed.

    RUN <command>
    RUN ["executable-command", "parameter1", "parameter2"]

# Example
    RUN pip install torch
    RUN pip install -r requirements.txt

    CMD specifies a command that the Docker container will run when it starts. There is a similar command called ENTRYPOINT. The difference between them will be discussed later. Note that only one CMD can be run in one Docker image, which is different from RUN command.

    CMD <command>
    CMD ["executable-command", "parameter1", "parameter2"]
    CMD ["parameter1", "parameter2"] # ENTRYPOINT 와 함께 사용될 때

# Example
    CMD python main.py

    WORKDIR is a command that specifies which directory inside the container to perform future additional commands. If the directory does not exist, it will be created.

    WORKDIR /path/to/workdir

# Example
WORKDIR /home/demo
RUN pwd # prints /home/demo

    This is a command to set the value of environment variables that will be used continuously inside the container.

    ENV <KEY> <VALUE>
    ENV <KEY>=<VALUE>

# Example
# set the default locale
RUN locale-gen ko_KR.UTF-8
ENV LANG ko_KR.UTF-8
ENV LANGUAGE ko_KR.UTF-8
ENV LC_ALL ko_KR.UTF-8

    You can specify the port/protocol to be opened from the container. If <protocol> is not specified, TCP is set as the default.

    EXPOSE <port>
    EXPOSE <port>/<protocol>

# Example
    EXPOSE 8080

    Write a simple Dockerfile by using vim Dockerfile or an editor like vscode and write the following:

# Set the base image to ubuntu 18.04.
FROM ubuntu:18.04

# Run the apt-get update command.
RUN apt-get update

# Set the value of the TEST env var to hello.
ENV TEST hello

# When the Docker container starts, print the value of the TEST environment variable.
CMD echo $TEST

    Use the docker build command to create a Docker Image from a Dockerfile.

    docker build --help

    Run the following command from the path where the Dockerfile is located.

    docker build -t my-image:v1.0.0 .

    The command above means to build an image with the name "my-image" and the tag "v1.0.0" from the Dockerfile in the current path. Let's check if the image was built successfully.

# grep: filter the output to check whether my-image is present
    docker images | grep my-image

    If performed normally, it will output as follows.

    my-image     v1.0.0    143114710b2d   3 seconds ago   87.9MB

    Let's now run a docker container with the my-image:v1.0.0 image that we just built.

    docker run my-image:v1.0.0

    If performed normally, it will result in the following.

    hello

    Let's run a docker container and change the value of the TEST env var at the time of running the my-image:v1.0.0 image we just built.

    docker run -e TEST=bye my-image:v1.0.0

    If performed normally, it will be as follows.

    bye
    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/index.html b/en/docs/prerequisites/docker/index.html index fbb7e5bb..51a892d0 100644 --- a/en/docs/prerequisites/docker/index.html +++ b/en/docs/prerequisites/docker/index.html @@ -7,7 +7,7 @@ - + @@ -15,8 +15,8 @@
    Version: Next

    What is Docker?

    Container

    • Containerization:
      • A technology that allows applications to be executed uniformly anywhere.
    • Container Image:
      • A collection of all the files required to run an application.
      • → Similar to a mold for making fish-shaped bread (Bungeoppang).
    • Container:
      • A single process that is executed based on a container image.
      • → A fish-shaped bread (Bungeoppang) produced using a mold.

    Docker

    Docker is a platform that allows you to manage and use containers.
    Its slogan is "Build Once, Run Anywhere," guaranteeing the same execution results anywhere.

In Docker, container resources are isolated and their lifecycle is controlled through Linux kernel features such as cgroups.
However, these interfaces are too low-level to use directly, so an abstraction layer is built on top of them.

    docker-layer.png

Through this abstraction, users can easily control containers with just the user-friendly Docker CLI.

    Interpretation of Layer

    The roles of the layers mentioned above are as follows:

    1. runC: Utilizes the functionality of the Linux kernel to isolate namespaces, CPUs, memory, filesystems, etc., for a container, which is a single process.
    2. containerd: Acts as an abstraction layer to communicate with runC (OCI layer) and uses the standardized interface (OCI).
    3. dockerd: Solely responsible for issuing commands to containerd.
    4. Docker CLI: Users only need to issue commands to dockerd (Docker daemon) using Docker CLI.
      • During this communication process, Unix socket is used, so sometimes Docker-related errors occur, such as "the /var/run/docker.sock is in use" or "insufficient permissions" error messages.

    Although Docker encompasses many stages, when the term "Docker" is used, it can refer to Docker CLI, Dockerd (Docker daemon), or even a single Docker container, which can lead to confusion.
In the upcoming text, the term "Docker" may be used in various contexts.

    For ML Engineer

    ML engineers use Docker for the following reasons:

    1. ML training/inference code needs to be independent of the underlying operating system, Python version, Python environment, and specific versions of Python packages.
    2. Therefore, the goal is to bundle not only the code but also all the dependent packages, environment variables, folder names, etc., into a single package. Containerization technology enables this.
    3. Docker is one of the software tools that makes it easy to use and manage this technology, and the packaged units are referred to as Docker images.
    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/install/index.html b/en/docs/prerequisites/docker/install/index.html index 7e4c71b4..be9f0d2b 100644 --- a/en/docs/prerequisites/docker/install/index.html +++ b/en/docs/prerequisites/docker/install/index.html @@ -7,15 +7,15 @@ - +
    Version: Next

    Install Docker

    Docker

    To practice Docker, you need to install Docker.
    The Docker installation varies depending on which OS you are using.
Please refer to the official website for the Docker installation that fits your environment:

    Check Installation

To check the installation, you need an OS and terminal environment in which docker run hello-world runs correctly.

OS         Docker Engine    Terminal
MacOS      Docker Desktop   zsh
Windows    Docker Desktop   Powershell
Windows    Docker Desktop   WSL2
Ubuntu     Docker Engine    bash

    Before diving in..

    It is possible that many metaphors and examples will be focused towards MLOps as they explain the necessary Docker usage to use MLOps.

    + \ No newline at end of file diff --git a/en/docs/prerequisites/docker/introduction/index.html b/en/docs/prerequisites/docker/introduction/index.html index f7fad5de..f28dffdf 100644 --- a/en/docs/prerequisites/docker/introduction/index.html +++ b/en/docs/prerequisites/docker/introduction/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ However, the initial Kubernetes included Docker Engine for container virtualization.
    Therefore, whenever the Docker version was updated, the interface of Docker Engine changed and Kubernetes was greatly affected.

    Open Container Initiative

    In order to alleviate such inconveniences, many groups interested in container technology such as Google have come together to start the Open Container Initiative (OCI) project to set standards for containers.
    Docker further separated its interface and developed Containerd, a Container Runtime that adheres to the OCI standard, and added an abstraction layer so that dockerd calls the API of Containerd.

    In accordance with this flow, Kubernetes also now supports not only Docker, but any Container Runtime that adheres to the OCI standard and the specified specifications with the Container Runtime Interface (CRI) specification, starting from version 1.5.

    CRI-O

    CRI-O is a container runtime developed by Red Hat, Intel, SUSE, and IBM, which adheres to the OCI standard + CRI specifications, specifically for Kubernetes.

    Current docker & kubernetes

Kubernetes has long used Docker Engine as its default container runtime, but because Docker's API does not match the CRI specification (it follows OCI instead), Kubernetes developed and maintained dockershim to adapt Docker's API to CRI, which was a significant burden on the Kubernetes side rather than on Docker's. dockershim was deprecated in Kubernetes v1.20 and removed in v1.23.

    • v1.23 will be released in December 2021

So from Kubernetes v1.23, you can no longer use Docker natively. However, users are not much affected by this change because Docker images created through Docker Engine comply with the OCI standard, so they can be used regardless of which container runtime Kubernetes uses.

    References

    + \ No newline at end of file diff --git a/en/docs/setup-components/install-components-kf/index.html b/en/docs/setup-components/install-components-kf/index.html index e692bb67..14e494a9 100644 --- a/en/docs/setup-components/install-components-kf/index.html +++ b/en/docs/setup-components/install-components-kf/index.html @@ -7,7 +7,7 @@ - + @@ -24,8 +24,8 @@ virtualservice.networking.istio.io/jupyter-web-app-jupyter-web-app created

    Wait until one pod is Running.

Install the Profile Controller.

    kustomize build apps/profiles/upstream/overlays/kubeflow | kubectl apply -f -

    If performed normally, it will be outputted as follows.

    customresourcedefinition.apiextensions.k8s.io/profiles.kubeflow.org created
    serviceaccount/profiles-controller-service-account created
    role.rbac.authorization.k8s.io/profiles-leader-election-role created
    rolebinding.rbac.authorization.k8s.io/profiles-leader-election-rolebinding created
    clusterrolebinding.rbac.authorization.k8s.io/profiles-cluster-role-binding created
    configmap/namespace-labels-data-48h7kd55mc created
    configmap/profiles-config-46c7tgh6fd created
    service/profiles-kfam created
    deployment.apps/profiles-deployment created
    virtualservice.networking.istio.io/profiles-kfam created

    Check to see if it is installed normally.

    kubectl get po -n kubeflow | grep profiles-deployment

    Wait until one pod is running.

    profiles-deployment-89f7d88b-qsnrd                       2/2     Running   0          42s

    Install the Volumes Web App.

    kustomize build apps/volumes-web-app/upstream/overlays/istio | kubectl apply -f -

    If performed normally, it will be output as follows.

    serviceaccount/volumes-web-app-service-account created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-cluster-role created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-admin created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-edit created
    clusterrole.rbac.authorization.k8s.io/volumes-web-app-kubeflow-volume-ui-view created
    clusterrolebinding.rbac.authorization.k8s.io/volumes-web-app-cluster-role-binding created
    configmap/volumes-web-app-parameters-4gg8cm2gmk created
    service/volumes-web-app-service created
    deployment.apps/volumes-web-app-deployment created
    virtualservice.networking.istio.io/volumes-web-app-volumes-web-app created

    Check if it is installed normally.

    kubectl get po -n kubeflow | grep volumes-web-app

    Wait until one pod is running.

    volumes-web-app-deployment-8589d664cc-62svl              1/1     Running   0          27s
Install the Tensorboard Web App.

If performed normally, it will be output as follows.

serviceaccount/tensorboards-web-app-service-account created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-admin created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-edit created
clusterrole.rbac.authorization.k8s.io/tensorboards-web-app-kubeflow-tensorboard-ui-view created
clusterrolebinding.rbac.authorization.k8s.io/tensorboards-web-app-cluster-role-binding created
configmap/tensorboards-web-app-parameters-g28fbd6cch created
service/tensorboards-web-app-service created
deployment.apps/tensorboards-web-app-deployment created
virtualservice.networking.istio.io/t

Check if it is installed correctly and wait until one pod is Running.

tensorboard-web-app-deployment-6ff79b7f44-qbzmw

Install the Tensorboard Controller.

If performed normally, a customresourcedefinition for tensorboards.tensorboard.kubeflow.org is created, along with a service account, roles, role bindings, a config map, the controller manager metrics service, and deployment.apps/tensorboard-controller-controller-manager.

Check that deployment.apps/tensorboard-controller-controller-manager was installed correctly and wait until one pod is Running.

Install the Training Operator.

    kustomize build apps/training-operator/upstream/overlays/kubeflow | kubectl apply -f -

    If performed normally, it will be output as follows.

    customresourcedefinition.apiextensions.k8s.io/mxjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/pytorchjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/tfjobs.kubeflow.org created
    customresourcedefinition.apiextensions.k8s.io/xgboostjobs.kubeflow.org created
    serviceaccount/training-operator created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-admin created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-edit created
    clusterrole.rbac.authorization.k8s.io/kubeflow-training-view created
    clusterrole.rbac.authorization.k8s.io/training-operator created
    clusterrolebinding.rbac.authorization.k8s.io/training-operator created
    service/training-operator created
    deployment.apps/training-operator created

    Check to see if it has been installed normally.

    kubectl get po -n kubeflow | grep training-operator

    Wait until one pod is up and running.

    training-operator-7d98f9dd88-6887f                          1/1     Running   0          28s

    User Namespace

    For using Kubeflow, create a Kubeflow Profile for the User to be used.

    kustomize build common/user-namespace/base | kubectl apply -f -

    If performed normally, it will be outputted as follows.

    configmap/default-install-config-9h2h2b6hbk created
    profile.kubeflow.org/kubeflow-user-example-com created

    Confirm that the kubeflow-user-example-com profile has been created.

    kubectl get profile
    kubeflow-user-example-com   37s

    Check installation

Confirm that the installation succeeded by port-forwarding and accessing the Kubeflow central dashboard in a web browser.

    kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80

Open a web browser and connect to http://localhost:8080 to confirm that the following screen is displayed.

login-ui

    Enter the following connection information to connect.

    • Email Address: user@example.com
    • Password: 12341234

    central-dashboard

    + \ No newline at end of file diff --git a/en/docs/setup-components/install-components-mlflow/index.html b/en/docs/setup-components/install-components-mlflow/index.html index b9bf05b6..0e370c86 100644 --- a/en/docs/setup-components/install-components-mlflow/index.html +++ b/en/docs/setup-components/install-components-mlflow/index.html @@ -7,7 +7,7 @@ - + @@ -17,8 +17,8 @@ However, in order to separate it for kubeflow and mlflow purposes, we will create a mlflow-specific bucket.
    First, port-forward the minio-service to access Minio and create the bucket.

    kubectl port-forward svc/minio-service -n kubeflow 9000:9000

    Open a web browser and connect to localhost:9000 to display the following screen.

    minio-install

    Enter the following credentials to log in:

    • Username: minio
    • Password: minio123

Click the + button at the bottom right, then click Create Bucket.

    create-bucket

    Enter mlflow in Bucket Name to create the bucket.

    If successfully created, you will see a bucket named mlflow on the left. mlflow-bucket


    Let's Install MLflow Tracking Server

    Add Helm Repository

    helm repo add mlops-for-all https://mlops-for-all.github.io/helm-charts

    If the following message is displayed, it means it has been added successfully.

    "mlops-for-all" has been added to your repositories

    Update Helm Repository

    helm repo update

    If the following message is displayed, it means that the update has been successfully completed.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "mlops-for-all" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

    Install mlflow-server Helm Chart version 0.2.0.

    helm install mlflow-server mlops-for-all/mlflow-server \
    --namespace mlflow-system \
    --version 0.2.0
    • The above Helm chart installs MLflow with the connection information for its backend store and artifacts store set to the default minio created during the Kubeflow installation process and the postgresql information created from the PostgreSQL DB installation above.
      • If you want to use a separate DB or object storage, please refer to the Helm Chart Repo and set the values separately during helm install.

    The following message should be displayed:

    NAME: mlflow-server
    LAST DEPLOYED: Sat Dec 18 22:02:13 2021
    NAMESPACE: mlflow-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

    Check to see if it was installed normally.

    kubectl get pod -n mlflow-system | grep mlflow-server

    Wait until one mlflow-server related pod is running in the mlflow-system namespace.
If the output looks similar to the following, it has started successfully.

    mlflow-server-ffd66d858-6hm62        1/1     Running   0          74s

    Check installation

    Let's now check if we can successfully connect to the MLflow Server.

    First, we will perform port forwarding in order to connect from the client node.

    kubectl port-forward svc/mlflow-server-service -n mlflow-system 5000:5000

    Open a web browser and connect to localhost:5000 and the following screen will be output.

    mlflow-install
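Beyond opening the UI, you can also check the server from Python with the MLflow client. The sketch below is optional and assumes the port-forward above is still running; the experiment and run names are arbitrary, and uploading artifacts would additionally require the MinIO credentials (for example MLFLOW_S3_ENDPOINT_URL, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) to be set.

import mlflow

# Assumes the port-forward to mlflow-server-service is still running.
mlflow.set_tracking_uri("http://localhost:5000")
mlflow.set_experiment("connection-check")

with mlflow.start_run(run_name="hello-mlflow"):
    mlflow.log_param("example_param", 1)
    mlflow.log_metric("example_metric", 0.99)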

    + \ No newline at end of file diff --git a/en/docs/setup-components/install-components-pg/index.html b/en/docs/setup-components/install-components-pg/index.html index 0d549f80..2087499e 100644 --- a/en/docs/setup-components/install-components-pg/index.html +++ b/en/docs/setup-components/install-components-pg/index.html @@ -7,15 +7,15 @@ - +
    Version: Next

    4. Prometheus & Grafana

    Prometheus & Grafana

    Prometheus and Grafana are tools for monitoring.
    For stable service operation, it is necessary to continuously observe the status of the service and infrastructure where the service is operating, and to respond quickly based on the observed metrics when a problem arises.
Among the many tools available for such monitoring, MLOps for ALL uses the open-source Prometheus and Grafana.

    For more information, please refer to the Prometheus Official Documentation and Grafana Official Documentation.

Prometheus is a tool that collects metrics from various targets, and Grafana is a tool that visualizes the collected data. Although there is no dependency between them, they are often used together, complementing each other.

    In this page, we will install Prometheus and Grafana on a Kubernetes cluster, then send API requests to a SeldonDeployment created with Seldon-Core and check if metrics are collected successfully.

    We also install a dashboard to efficiently monitor the metrics of the SeldonDeployment created in Seldon-Core using Helm Chart version 1.12.0 from seldonio/seldon-core-analytics Helm Repository.

    Add Helm Repository

    helm repo add seldonio https://storage.googleapis.com/seldon-charts

    If the following message is output, it means that it has been added successfully.

    "seldonio" has been added to your repositories

    Update Helm Repository

    helm repo update

    If the following message is displayed, it means that the update was successful.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "seldonio" chart repository
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Helm Install

    Install version 1.12.0 of the seldon-core-analytics Helm Chart.

    helm install seldon-core-analytics seldonio/seldon-core-analytics \
    --namespace seldon-system \
    --version 1.12.0

    The following message should be output.

    Skip...
    NAME: seldon-core-analytics
    LAST DEPLOYED: Tue Dec 14 18:29:38 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1

    Check to see if it was installed normally.

    kubectl get pod -n seldon-system | grep seldon-core-analytics

    Wait until 6 seldon-core-analytics related pods are Running in the seldon-system namespace.

    seldon-core-analytics-grafana-657c956c88-ng8wn                  2/2     Running   0          114s
    seldon-core-analytics-kube-state-metrics-94bb6cb9-svs82 1/1 Running 0 114s
    seldon-core-analytics-prometheus-alertmanager-64cf7b8f5-nxbl8 2/2 Running 0 114s
    seldon-core-analytics-prometheus-node-exporter-5rrj5 1/1 Running 0 114s
    seldon-core-analytics-prometheus-pushgateway-8476474cff-sr4n6 1/1 Running 0 114s
    seldon-core-analytics-prometheus-seldon-685c664894-7cr45 2/2 Running 0 114s

    Check installation

    Let's now check if we can connect to Grafana normally. First, we will port forward to connect to the client node.

    kubectl port-forward svc/seldon-core-analytics-grafana -n seldon-system 8090:80

    Open the web browser and connect to localhost:8090, then the following screen will be displayed.

    grafana-install

    Enter the following connection information to connect.

    • Email or username: admin
    • Password: password

    When you log in, the following screen will be displayed.

    grafana-login

    Click the dashboard icon on the left and click the Manage button.

    dashboard-click

    You can see that the basic Grafana dashboard is included. Click the Prediction Analytics dashboard among them.

    dashboard

The Seldon Core API Dashboard is displayed, as shown below.

    seldon-dashboard
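If you also want to confirm that Prometheus itself is scraping the Seldon metrics, you can query its HTTP API directly. The service name below is inferred from the pod list above and the metric name is only an example, so both may differ depending on your chart and Seldon-Core versions.

# Confirm the Prometheus service name and port first.
kubectl get svc -n seldon-system | grep prometheus-seldon

kubectl port-forward svc/seldon-core-analytics-prometheus-seldon -n seldon-system 9090:80

# Query the Prometheus HTTP API for a Seldon request metric (example metric name).
curl 'http://localhost:9090/api/v1/query?query=seldon_api_executor_server_requests_seconds_count'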

    References

    Version: Next

    3. Seldon-Core

    Seldon-Core

    Seldon-Core is one of the open source frameworks that can deploy and manage numerous machine learning models in Kubernetes environments.
For more details, please refer to Seldon-Core's official product page and GitHub repository, as well as the API Deployment part of this guide.

    Installing Seldon-Core

To use Seldon-Core, an ingress module for Kubernetes such as Ambassador or Istio is required.
Seldon-Core officially supports only Ambassador and Istio; MLOps for ALL uses Ambassador with Seldon-Core, so we will install Ambassador.

    Adding Ambassador to the Helm Repository

    helm repo add datawire https://www.getambassador.io

    If the following message is displayed, it means it has been added normally.

    "datawire" has been added to your repositories

    Update Ambassador - Helm Repository

    helm repo update

    If the following message is output, it means that the update has been completed normally.

    Hang tight while we grab the latest from your chart repositories...
    ...Successfully got an update from the "datawire" chart repository
    Update Complete. ⎈Happy Helming!

    Ambassador - Helm Install

    Install version 6.9.3 of the Ambassador Chart.

    helm install ambassador datawire/ambassador \
    --namespace seldon-system \
    --create-namespace \
    --set image.repository=quay.io/datawire/ambassador \
    --set enableAES=false \
    --set crds.keep=false \
    --version 6.9.3

    The following message should be displayed.

Skip...

    W1206 17:01:36.026326 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
    W1206 17:01:36.029764 26635 warnings.go:70] rbac.authorization.k8s.io/v1beta1 RoleBinding is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 RoleBinding
    NAME: ambassador
    LAST DEPLOYED: Mon Dec 6 17:01:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    NOTES:
    -------------------------------------------------------------------------------
    Congratulations! You've successfully installed Ambassador!

    -------------------------------------------------------------------------------
    To get the IP address of Ambassador, run the following commands:
    NOTE: It may take a few minutes for the LoadBalancer IP to be available.
    You can watch the status of by running 'kubectl get svc -w --namespace seldon-system ambassador'

    On GKE/Azure:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].ip}')

    On AWS:
    export SERVICE_IP=$(kubectl get svc --namespace seldon-system ambassador -o jsonpath='{.status.loadBalancer.ingress[0].hostname}')

    echo http://$SERVICE_IP:

    For help, visit our Slack at http://a8r.io/Slack or view the documentation online at https://www.getambassador.io.

Wait until the four Ambassador pods are Running in the seldon-system namespace.

    kubectl get pod -n seldon-system
ambassador-7f596c8b57-4s9xh         1/1     Running   0          7m15s
ambassador-7f596c8b57-dt6lr         1/1     Running   0          7m15s
ambassador-7f596c8b57-h5l6f         1/1     Running   0          7m15s
ambassador-agent-77bccdfcd5-d5jxj   1/1     Running   0          7m15s

    Seldon-Core - Helm Install

    Install version 1.11.2 of the seldon-core-operator Chart.

    helm install seldon-core seldon-core-operator \
    --repo https://storage.googleapis.com/seldon-charts \
    --namespace seldon-system \
    --set usageMetrics.enabled=true \
    --set ambassador.enabled=true \
    --version 1.11.2

    The following message should be displayed.

    Skip...

    W1206 17:05:38.336391 28181 warnings.go:70] admissionregistration.k8s.io/v1beta1 ValidatingWebhookConfiguration is deprecated in v1.16+, unavailable in v1.22+; use admissionregistration.k8s.io/v1 ValidatingWebhookConfiguration
    NAME: seldon-core
    LAST DEPLOYED: Mon Dec 6 17:05:34 2021
    NAMESPACE: seldon-system
    STATUS: deployed
    REVISION: 1
    TEST SUITE: None

    Wait until one seldon-controller-manager pod is Running in the seldon-system namespace.

    kubectl get pod -n seldon-system | grep seldon-controller
    seldon-controller-manager-8457b8b5c7-r2frm   1/1     Running   0          2m22s
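To make sure the installation works end to end, you can create a minimal SeldonDeployment and check that its pods start. The manifest below is only a sketch: the seldon-test namespace is arbitrary and the model URI points at Seldon's public example iris model, so adjust both to your environment.

kubectl create namespace seldon-test

cat <<EOF | kubectl apply -f -
apiVersion: machinelearning.seldon.io/v1
kind: SeldonDeployment
metadata:
  name: sklearn-iris
  namespace: seldon-test
spec:
  predictors:
  - name: default
    replicas: 1
    graph:
      name: classifier
      implementation: SKLEARN_SERVER
      modelUri: gs://seldon-models/sklearn/iris
EOF

# The deployment pods should reach Running after the model is downloaded.
kubectl get pods -n seldon-test

# Clean up when finished.
kubectl delete namespace seldon-test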

    References

    Version: Next

    5. Install Kubernetes Modules

    Setup Kubernetes Modules

On this page, we explain how to install, from the client node, the modules that will be used on the cluster.
All the steps introduced here are performed on the client node.

    Helm

    Helm is one of the package management tools that helps to deploy and manage resources related to Kubernetes packages at once.

    1. Download Helm version 3.7.1 into the current folder.
    • For Linux amd64

      wget https://get.helm.sh/helm-v3.7.1-linux-amd64.tar.gz
• For other operating systems, refer to the official website for the download path of the binary that matches the OS and CPU of your client node.

2. Unzip the file to use helm and move the file to its desired location.

      tar -zxvf helm-v3.7.1-linux-amd64.tar.gz
      sudo mv linux-amd64/helm /usr/local/bin/helm
3. Check to see if the installation was successful:

      helm help

      If you see the following message, it means that it has been installed normally.

      The Kubernetes package manager

      Common actions for Helm:
    • helm search: search for charts

    • helm pull: download a chart to your local directory to view

    • helm install: upload the chart to Kubernetes

    • helm list: list releases of charts

      Environment variables:

Name                 Description
$HELM_CACHE_HOME     set an alternative location for storing cached files.
$HELM_CONFIG_HOME    set an alternative location for storing Helm configuration.
$HELM_DATA_HOME      set an alternative location for storing Helm data.

      ...


    Kustomize

    Kustomize is one of the package management tools that helps to deploy and manage multiple Kubernetes resources at once.

    1. Download the binary version of kustomize v3.10.0 in the current folder.
    • For Linux amd64

      wget https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv3.10.0/kustomize_v3.10.0_linux_amd64.tar.gz
• For other operating systems, check the kustomize/v3.10.0 release page and download the binary that matches your OS.

2. Unzip the file to use kustomize and move the file to its desired location.

      tar -zxvf kustomize_v3.10.0_linux_amd64.tar.gz
      sudo mv kustomize /usr/local/bin/kustomize
3. Check if it is installed correctly.

      kustomize help

      If you see the following message, it means that it has been installed normally.

      Manages declarative configuration of Kubernetes.
      See https://sigs.k8s.io/kustomize

      Usage:
      kustomize [command]

      Available Commands:
      build Print configuration per contents of kustomization.yaml
      cfg Commands for reading and writing configuration.
      completion Generate shell completion script
      create Create a new kustomization in the current directory
      edit Edits a kustomization file
      fn Commands for running functions against configuration.
      ...
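If you want to quickly confirm that kustomize renders manifests correctly, you can build a minimal kustomization; the directory and file contents below are placeholders used only for this check.

mkdir kustomize-test && cd kustomize-test

cat <<EOF > configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: example-config
data:
  key: value
EOF

cat <<EOF > kustomization.yaml
resources:
- configmap.yaml
namePrefix: test-
EOF

# The rendered output should show a ConfigMap named test-example-config.
kustomize build .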

    CSI Plugin : Local Path Provisioner

    1. The CSI Plugin is a module that is responsible for storage within Kubernetes. Install the CSI Plugin, Local Path Provisioner, which is easy to use in single node clusters.

      kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.20/deploy/local-path-storage.yaml

      If you see the following messages, it means that the installation was successful:

      namespace/local-path-storage created
      serviceaccount/local-path-provisioner-service-account created
      clusterrole.rbac.authorization.k8s.io/local-path-provisioner-role created
      clusterrolebinding.rbac.authorization.k8s.io/local-path-provisioner-bind created
      deployment.apps/local-path-provisioner created
      storageclass.storage.k8s.io/local-path created
      configmap/local-path-config created
    2. Also, check if the provisioner pod in the local-path-storage namespace is Running by executing the following command:

      kubectl -n local-path-storage get pod

    If successful, it will display the following output:

NAME                                     READY     STATUS    RESTARTS   AGE
local-path-provisioner-d744ccf98-xfcbk   1/1       Running   0          7m
3. Execute the following command to change the default storage class:

      kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'

      If the command is successful, the following output will be displayed:

      storageclass.storage.k8s.io/local-path patched
4. Verify that the default storage class has been set:

      kubectl get sc

      Check if there is a storage class with the name local-path (default) in the NAME column:

NAME                   PROVISIONER             RECLAIMPOLICY   VOLUMEBINDINGMODE      ALLOWVOLUMEEXPANSION   AGE
local-path (default)   rancher.io/local-path   Delete          WaitForFirstConsumer   false                  2h
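To check that dynamic provisioning works with the new default storage class, you can create a test PVC; the claim name below is a placeholder. Because local-path uses the WaitForFirstConsumer binding mode, the claim stays Pending until a pod actually mounts it.

cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: local-path-test-pvc
spec:
  accessModes:
  - ReadWriteOnce
  resources:
    requests:
      storage: 1Gi
EOF

# STATUS stays Pending until a pod mounts the claim, which is expected with WaitForFirstConsumer.
kubectl get pvc local-path-test-pvc

# Clean up when finished.
kubectl delete pvc local-path-test-pvc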
4.1. K3s

By default, k3s uses containerd as its container runtime. However, we need to use Docker as the backend in order to use the GPU, so we will install k3s with the --docker option.

    curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION=v1.21.7+k3s1 sh -s - server --disable traefik --disable servicelb --disable local-storage --docker

    After installing k3s, check the k3s config.

    sudo cat /etc/rancher/k3s/k3s.yaml

    If installed correctly, the following items will be output. (Security related keys are hidden with <...>.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://127.0.0.1:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>

    2. Setup Kubernetes Cluster

    Set up the Kubernetes cluster by copying the k3s config to be used as the cluster’s kubeconfig.

    mkdir .kube
    sudo cp /etc/rancher/k3s/k3s.yaml .kube/config

    Grant user access permission to the copied config file.

    sudo chown $USER:$USER .kube/config

    3. Setup Kubernetes Client

Now copy the kubeconfig configured on the cluster to the local client and save it as ~/.kube/config.

The copied config file initially has the server address set to https://127.0.0.1:6443. Modify this value to match the IP of the cluster. (In this page, we changed it to https://192.168.0.19:6443 to match the cluster being used.)

apiVersion: v1
clusters:
- cluster:
    certificate-authority-data: <...>
    server: https://192.168.0.19:6443
  name: default
contexts:
- context:
    cluster: default
    user: default
  name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
  user:
    client-certificate-data: <...>
    client-key-data: <...>
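If the client is a separate machine, one way to bring the kubeconfig over before editing it is scp; the placeholders below follow the same convention used in the kubeadm page and should be replaced with your own user and cluster IP.

mkdir -p ~/.kube
scp {CLUSTER_USER_ID}@{CLUSTER_IP}:~/.kube/config ~/.kube/config

# After editing the server address as described above, check the connection.
kubectl get nodes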

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

    If you see the following message, it means that the installation was successful.

NAME     STATUS   ROLES                  AGE   VERSION        INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
ubuntu   Ready    control-plane,master   11m   v1.21.7+k3s1   192.168.0.19   <none>        Ubuntu 20.04.3 LTS   5.4.0-91-generic   docker://20.10.11

    6. References

    Version: Next

    4.3. Kubeadm

    1. Prerequisite

    Before building a Kubernetes cluster, install the necessary components to the cluster.

    Please refer to Install Prerequisite and install the necessary components to the cluster.

    Change the configuration of the network for Kubernetes.

    sudo modprobe br_netfilter

    cat <<EOF | sudo tee /etc/modules-load.d/k8s.conf
    br_netfilter
    EOF

    cat <<EOF | sudo tee /etc/sysctl.d/k8s.conf
    net.bridge.bridge-nf-call-ip6tables = 1
    net.bridge.bridge-nf-call-iptables = 1
    EOF
    sudo sysctl --system
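To double-check that the settings were applied, you can read the two keys back.

sysctl net.bridge.bridge-nf-call-iptables net.bridge.bridge-nf-call-ip6tables

Both values should be reported as 1:

net.bridge.bridge-nf-call-iptables = 1
net.bridge.bridge-nf-call-ip6tables = 1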

    2. Setup Kubernetes Cluster

    • kubeadm : Automates the installation process by registering kubelet as a service and issuing certificates for communication between cluster components.
    • kubelet : Container handler responsible for starting and stopping container resources.
    • kubectl : CLI tool used to interact with and manage Kubernetes clusters from the terminal.

Install kubeadm, kubelet, and kubectl using the following commands. The versions are pinned with apt-mark hold to prevent accidental upgrades, which can lead to unexpected issues.

    sudo apt-get update
    sudo apt-get install -y apt-transport-https ca-certificates curl &&
    sudo curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg &&
    echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list &&
    sudo apt-get update
    sudo apt-get install -y kubelet=1.21.7-00 kubeadm=1.21.7-00 kubectl=1.21.7-00 &&
    sudo apt-mark hold kubelet kubeadm kubectl

    Check if kubeadm, kubelet, and kubectl are installed correctly.

    mlops@ubuntu:~$ kubeadm version
    kubeadm version: &version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:40:08Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
    mlops@ubuntu:~$ kubelet --version
    Kubernetes v1.21.7
    mlops@ubuntu:~$ kubectl version --client
    Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}

    Now we will use kubeadm to install Kubernetes.

    kubeadm config images list
    kubeadm config images pull

    sudo kubeadm init --pod-network-cidr=10.244.0.0/16

To control the Kubernetes cluster through kubectl, copy the admin kubeconfig to the path $HOME/.kube/config.

    mkdir -p $HOME/.kube
    sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
    sudo chown $(id -u):$(id -g) $HOME/.kube/config

Install a CNI. The CNI is responsible for setting up the network inside Kubernetes; there are various kinds, and MLOps for ALL uses flannel.

    kubectl apply -f https://raw.githubusercontent.com/flannel-io/flannel/v0.13.0/Documentation/kube-flannel.yml

There are two types of Kubernetes nodes: the Master Node and the Worker Node. For stability, it is generally recommended that only tasks that control the Kubernetes cluster run on the Master Node; however, this guide assumes a single-node cluster, so we allow all kinds of workloads to run on the Master Node.

    kubectl taint nodes --all node-role.kubernetes.io/master-

    3. Setup Kubernetes Client

    Copy the kubeconfig file created in the cluster to the client to control the cluster through kubectl.

    mkdir -p $HOME/.kube
    scp -p {CLUSTER_USER_ID}@{CLUSTER_IP}:~/.kube/config ~/.kube/config

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check if the nodes are Ready and verify the OS, Docker, and Kubernetes versions.

    kubectl get nodes

    When the node is in the "Ready" state, the output will be similar to the following:

NAME     STATUS   ROLES                  AGE     VERSION
ubuntu   Ready    control-plane,master   2m55s   v1.21.7

    6. References

users:
- name: minikube
  user:
    client-certificate-data: LS0tLS1CRUdJTi....
    client-key-data: LS0tLS1CRUdJTiBSU0....

    1. Create the .kube folder on the client node:

      # Client node
      mkdir -p /home/$USER/.kube
    2. Paste the information obtained from Step 2 into the file and save it:

      vi /home/$USER/.kube/config

    4. Install Kubernetes Default Modules

    Please refer to Setup Kubernetes Modules to install the following components:

    • helm
    • kustomize
    • CSI plugin
    • [Optional] nvidia-docker, nvidia-device-plugin

    5. Verify Successful Installation

    Finally, check that the node is Ready, and check the OS, Docker, and Kubernetes versions.

    kubectl get nodes -o wide

    If this message appears, it means that the installation has completed normally.

NAME     STATUS   ROLES                  AGE     VERSION   INTERNAL-IP    EXTERNAL-IP   OS-IMAGE             KERNEL-VERSION     CONTAINER-RUNTIME
ubuntu   Ready    control-plane,master   2d23h   v1.21.7   192.168.0.75   <none>        Ubuntu 20.04.3 LTS   5.4.0-91-generic   docker://20.10.11
    Version: Next

    3. Install Prerequisite

    On this page, we describe the components that need to be installed or configured on the Cluster and Client prior to installing Kubernetes.

    Install apt packages

    In order to enable smooth communication between the Client and the Cluster, Port-Forwarding needs to be performed. To enable Port-Forwarding, the following packages need to be installed on the Cluster.

    sudo apt-get update
    sudo apt-get install -y socat

    Install Docker

    1. Install apt packages for docker.

      sudo apt-get update && sudo apt-get install -y ca-certificates curl gnupg lsb-release
2. Add Docker's official GPG key.

      curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
    3. When installing Docker using the apt package manager, configure it to retrieve from the stable repository:

      echo \
      "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
      $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
    4. Check the currently available Docker versions for installation:

      sudo apt-get update && apt-cache madison docker-ce

Verify that version 5:20.10.11~3-0~ubuntu-focal is listed in the output:

      apt-cache madison docker-ce | grep 5:20.10.11~3-0~ubuntu-focal

      If the addition was successful, the following output will be displayed:

      docker-ce | 5:20.10.11~3-0~ubuntu-focal | https://download.docker.com/linux/ubuntu focal/stable amd64 Packages
    5. Install Docker version 5:20.10.11~3-0~ubuntu-focal:

      sudo apt-get install -y containerd.io docker-ce=5:20.10.11~3-0~ubuntu-focal docker-ce-cli=5:20.10.11~3-0~ubuntu-focal

    6. Check docker is installed.

      sudo docker run hello-world

If it runs successfully, the output will be as follows:

    mlops@ubuntu:~$ sudo docker run hello-world

    Hello from Docker!
    This message shows that your installation appears to be working correctly.

    To generate this message, Docker took the following steps:
    1. The Docker client contacted the Docker daemon.
    2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
    (amd64)
    3. The Docker daemon created a new container from that image which runs the
    executable that produces the output you are currently reading.
    4. The Docker daemon streamed that output to the Docker client, which sent it
    to your terminal.

    To try something more ambitious, you can run an Ubuntu container with:
    $ docker run -it ubuntu bash

    Share images, automate workflows, and more with a free Docker ID:
    https://hub.docker.com/

    For more examples and ideas, visit:
    https://docs.docker.com/get-started/
7. Add permissions to use Docker commands without the sudo keyword by executing the following commands:

      sudo groupadd docker
      sudo usermod -aG docker $USER
      newgrp docker
8. To verify that you can now use Docker commands without sudo, run the docker run command again:

      docker run hello-world

      If you see the following message after executing the command, it means that the permissions have been successfully added:

      mlops@ubuntu:~$ docker run hello-world

      Hello from Docker!
      This message shows that your installation appears to be working correctly.

      To generate this message, Docker took the following steps:
      1. The Docker client contacted the Docker daemon.
      2. The Docker daemon pulled the "hello-world" image from the Docker Hub.
      (amd64)
      3. The Docker daemon created a new container from that image which runs the
      executable that produces the output you are currently reading.
      4. The Docker daemon streamed that output to the Docker client, which sent it
      to your terminal.

      To try something more ambitious, you can run an Ubuntu container with:
      $ docker run -it ubuntu bash

      Share images, automate workflows, and more with a free Docker ID:
      https://hub.docker.com/

      For more examples and ideas, visit:
      https://docs.docker.com/get-started/

    Turn off Swap Memory

For kubelet to work properly, the virtual memory called swap must be turned off on the cluster nodes. The following commands turn off swap.
(If the cluster and client run on the same desktop, turning off swap memory may slow the machine down.)

    sudo sed -i '/ swap / s/^\(.*\)$/#\1/g' /etc/fstab
    sudo swapoff -a
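To confirm that swap is actually off, you can check the memory summary.

free -h | grep -i swap

The Swap row should show 0B for the total after running the commands above:

Swap:          0B          0B          0B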

    Install Kubectl

    kubectl is a client tool used to make API requests to a Kubernetes cluster. It needs to be installed on the client node.

    1. Download kubectl version v1.21.7 to the current folder:

      curl -LO https://dl.k8s.io/release/v1.21.7/bin/linux/amd64/kubectl
    2. Change the file permissions and move it to the appropriate location to make kubectl executable:

      sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
    3. Verify that kubectl is installed correctly:

      kubectl version --client

      If you see the following message, it means that kubectl is installed successfully:

      Client Version: version.Info{Major:"1", Minor:"21", GitVersion:"v1.21.7", GitCommit:"1f86634ff08f37e54e8bfcd86bc90b61c98f84d4", GitTreeState:"clean", BuildDate:"2021-11-17T14:41:19Z", GoVersion:"go1.16.10", Compiler:"gc", Platform:"linux/amd64"}
4. If you work with multiple Kubernetes clusters and need to manage multiple kubeconfig files or kube-contexts efficiently, kubectl provides built-in commands for this; a few basic ones are shown below.
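  These are standard kubectl subcommands; the context name my-cluster and the second kubeconfig path are placeholders for your own values.

  # List the contexts available in the current kubeconfig.
  kubectl config get-contexts

  # Switch to a specific context.
  kubectl config use-context my-cluster

  # Temporarily combine several kubeconfig files for a single invocation.
  KUBECONFIG=~/.kube/config:~/.kube/other-config kubectl config view --flatten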

    References

Therefore, if you are not using a cloud environment, you can install UTM, a virtual machine app for Mac, to run virtual machines. (Purchasing UTM from the App Store is essentially a donation; the free version differs only in automatic updates, so it is sufficient.) This virtual machine software supports the Ubuntu 20.04.3 LTS practice operating system, enabling you to perform the exercises on an M1 Mac.

    However, since it is not possible to use all the elements described in the Components of MLOps, MLOps for ALL will mainly focus on installing the representative open source software and connecting them to each other.

The open source software installed in MLOps for ALL is not meant to be a standard; we recommend choosing the tools that best fit your own situation.

    Components

The components of the MLOps system that we will build in this guide, and their versions, have been verified in the following environment.

To facilitate smooth testing, we explain the setup of the Cluster and the Client as separate entities.

The Cluster refers to a single desktop with Ubuntu installed.
The Client is recommended to be a different machine, such as a laptop or another desktop that can access the Cluster and the Kubernetes installation. However, if you only have one machine available, you can use the same desktop for both the Cluster and the Client.

    Cluster

    1. Software

    Below is the list of software that needs to be installed on the Cluster:

Software          Version
Ubuntu            20.04.3 LTS
Docker (Server)   20.10.11
NVIDIA Driver     470.86
Kubernetes        v1.21.7
Kubeflow          v1.4.0
MLFlow            v1.21.0

    2. Helm Chart

    Below is the list of third-party software that needs to be installed using Helm:

Helm Chart Repo Name            Version
datawire/ambassador             6.9.3
seldonio/seldon-core-operator   1.11.2

    Client

    The Client has been validated on MacOS (Intel CPU) and Ubuntu 20.04.

Software    Version
kubectl     v1.21.7
helm        v3.7.1
kustomize   v3.10.0

    Minimum System Requirements

    It is recommended that the Cluster meet the following specifications, which are dependent on the recommended specifications for Kubernetes and Kubeflow:

    • CPU: 6 cores
    • RAM: 12GB
    • DISK: 50GB
    • GPU: NVIDIA GPU (optional)
    Version: Next

    2. Setup Kubernetes

    Setup Kubernetes Cluster

    For those learning Kubernetes for the first time, the first barrier to entry is setting up a Kubernetes practice environment.

    The official tool that supports building a production-level Kubernetes cluster is kubeadm, but there are also tools such as kubespray and kops that help users set up more easily, and tools such as k3s, minikube, microk8s, and kind that help you set up a compact Kubernetes cluster easily for learning purposes.

    Each tool has its own advantages and disadvantages, so considering the preferences of each user, this article will use three tools: kubeadm, k3s, and minikube to set up a Kubernetes cluster. For detailed comparisons of each tool, please refer to the official Kubernetes documentation.

    MLOps for ALL recommends k3s as a tool that is easy to use when setting up a Kubernetes cluster.

If you want to use all the features of Kubernetes and configure the nodes yourself, we recommend kubeadm.
minikube has the advantage that, in addition to the components we describe, other Kubernetes add-ons can be installed easily.

In MLOps for ALL, in order to use the MLOps components that will be built later without problems, there are additional settings that must be configured when building the Kubernetes cluster with each of these tools.

    The scope of this Setup Kubernetes section is to build a k8s cluster on a desktop that already has Ubuntu OS installed and to confirm that external client nodes can access the Kubernetes cluster.

    The detailed setup procedure is composed of the following flow, as each of the three tools has its own setup procedure.

    3. Setup Prerequisite
    4. Setup Kubernetes
    4.1. with k3s
    4.2. with minikube
    4.3. with kubeadm
    5. Setup Kubernetes Modules

    Let's now build a Kubernetes cluster by using each of the tools. You don't have to use all the tools, and you can use the tools that you are familiar with.

    Version: Next

    6. (Optional) Setup GPU

    For using GPU in Kubernetes and Kubeflow, the following tasks are required.

    1. Install NVIDIA Driver

If the following output appears when you run nvidia-smi, you can skip this step.

    mlops@ubuntu:~$ nvidia-smi 
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86 Driver Version: 470.86 CUDA Version: 11.4 |
    |-------------------------------+----------------------+----------------------+
    | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |
    | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
    | | | MIG M. |
    |===============================+======================+======================|
    | 0 NVIDIA GeForce ... Off | 00000000:01:00.0 Off | N/A |
    | 25% 32C P8 4W / 120W | 211MiB / 6078MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+
    | 1 NVIDIA GeForce ... Off | 00000000:02:00.0 Off | N/A |
    | 0% 34C P8 7W / 175W | 5MiB / 7982MiB | 0% Default |
    | | | N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes: |
    | GPU GI CI PID Type Process name GPU Memory |
    | ID ID Usage |
    |=============================================================================|
    | 0 N/A N/A 1644 G /usr/lib/xorg/Xorg 198MiB |
    | 0 N/A N/A 1893 G /usr/bin/gnome-shell 10MiB |
    | 1 N/A N/A 1644 G /usr/lib/xorg/Xorg 4MiB |
    +-----------------------------------------------------------------------------+

    If the output of nvidia-smi is not as above, please install the nvidia driver that fits your installed GPU.

If you are not familiar with installing NVIDIA drivers, you can install them using the following commands.

    sudo add-apt-repository ppa:graphics-drivers/ppa
    sudo apt update && sudo apt install -y ubuntu-drivers-common
    sudo ubuntu-drivers autoinstall
    sudo reboot

    2. Install NVIDIA-Docker.

    Let's install NVIDIA-Docker.

    curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | \
    sudo apt-key add -
    distribution=$(. /etc/os-release;echo $ID$VERSION_ID)
    curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
    sudo apt-get update
    sudo apt-get install -y nvidia-docker2 &&
    sudo systemctl restart docker

    To check if it is installed correctly, we will run the docker container using the GPU.

    sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi

    If the following message appears, it means that the installation was successful:

    mlops@ubuntu:~$ sudo docker run --rm --gpus all nvidia/cuda:11.0-base nvidia-smi
    +-----------------------------------------------------------------------------+
    | NVIDIA-SMI 470.86       Driver Version: 470.86       CUDA Version: 11.4     |
    |-------------------------------+----------------------+----------------------+
    | GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
    | Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
    |                               |                      |               MIG M. |
    |===============================+======================+======================|
    |   0  NVIDIA GeForce ...  Off  | 00000000:01:00.0 Off |                  N/A |
    | 25%   32C    P8     4W / 120W |    211MiB /  6078MiB |      0%      Default |
    |                               |                      |                  N/A |
    +-------------------------------+----------------------+----------------------+
    |   1  NVIDIA GeForce ...  Off  | 00000000:02:00.0 Off |                  N/A |
    |  0%   34C    P8     6W / 175W |      5MiB /  7982MiB |      0%      Default |
    |                               |                      |                  N/A |
    +-------------------------------+----------------------+----------------------+

    +-----------------------------------------------------------------------------+
    | Processes:                                                                  |
    |  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
    |        ID   ID                                                   Usage      |
    |=============================================================================|
    +-----------------------------------------------------------------------------+
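
    The --gpus flag can also target a single card, which is useful for checking each GPU separately. A small sketch; device index 0 is an example, so use the indices reported by nvidia-smi.

    # Run the same check against only the first GPU
    sudo docker run --rm --gpus '"device=0"' nvidia/cuda:11.0-base nvidia-smi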

    3. Setting NVIDIA-Docker as the Default Container Runtime

    By default, Kubernetes uses Docker-CE as its container runtime. To use NVIDIA GPUs inside the containers that Kubernetes creates, you need to make NVIDIA-Docker Docker's default runtime so that every pod is started with it.

    1. Open the /etc/docker/daemon.json file and make the following modifications:

      sudo vi /etc/docker/daemon.json

      {
          "default-runtime": "nvidia",
          "runtimes": {
              "nvidia": {
                  "path": "nvidia-container-runtime",
                  "runtimeArgs": []
              }
          }
      }
    2. After confirming the file changes, restart Docker.

      sudo systemctl daemon-reload
      sudo service docker restart
    3. Verify that the changes have been applied.

      sudo docker info | grep nvidia

      If you see the following output, the default runtime has been changed successfully.

      mlops@ubuntu:~$ docker info | grep nvidia
      Runtimes: io.containerd.runc.v2 io.containerd.runtime.v1.linux nvidia runc
      Default Runtime: nvidia
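
      If grep prints nothing, the most common cause is a syntax error in /etc/docker/daemon.json that kept Docker from restarting. Two quick checks, as a sketch (python3 is assumed to be available on the host):

      # Validate the JSON you just edited
      python3 -m json.tool /etc/docker/daemon.json

      # Print only the default runtime instead of grepping the full output
      sudo docker info --format '{{.DefaultRuntime}}'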

    4. NVIDIA Device Plugin

    1. Create the nvidia-device-plugin daemonset.

      kubectl create -f https://raw.githubusercontent.com/NVIDIA/k8s-device-plugin/v0.10.0/nvidia-device-plugin.yml
    2. Verify that the nvidia-device-plugin pod is in the RUNNING state.

      kubectl get pod -n kube-system | grep nvidia

      You should see output similar to the following:

      kube-system   nvidia-device-plugin-daemonset-nlqh2   1/1     Running   0    1h
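
      If the pod is not Running, its events and logs usually show why; the most frequent cause is that the nvidia default runtime from the previous step was not applied. A minimal check, reusing the pod name printed above (replace it with the name from your cluster):

      kubectl -n kube-system describe pod nvidia-device-plugin-daemonset-nlqh2
      kubectl -n kube-system logs nvidia-device-plugin-daemonset-nlqh2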

    3. Verify that the nodes have been configured to have GPUs available.

      kubectl get nodes "-o=custom-columns=NAME:.metadata.name,GPU:.status.allocatable.nvidia\.com/gpu"

      If you see the following message, it means that the configuration was successful.
      (In the MLOps for ALL tutorial cluster there are two GPUs, so the output shows 2. If the output shows the correct number of GPUs for your cluster, it is fine.)

      NAME       GPU
      ubuntu     2

      If it is not configured, the GPU value will be displayed as <None>.
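
    As a final end-to-end check, you can schedule a pod that requests a GPU and prints nvidia-smi. The manifest below is a minimal sketch; the file name, pod name, and image tag are only examples.

    # gpu-test.yaml -- a throwaway pod that requests one GPU and runs nvidia-smi
    apiVersion: v1
    kind: Pod
    metadata:
      name: gpu-smoke-test
    spec:
      restartPolicy: Never
      containers:
      - name: cuda
        image: nvidia/cuda:11.0-base
        command: ["nvidia-smi"]
        resources:
          limits:
            nvidia.com/gpu: 1

    Apply the manifest and read the log once the pod has completed:

    kubectl apply -f gpu-test.yaml
    kubectl logs gpu-smoke-test
    kubectl delete pod gpu-smoke-test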

